From 49804bfef767f04125b2d050073fc1fa1d564cd1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Jul 2022 22:42:06 -0400 Subject: [PATCH 001/111] chore(python): allow client documentation to be customized in README (#361) Source-Link: https://github.com/googleapis/synthtool/commit/95d9289ac3dc1ca2edae06619c82fe7a24d555f1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- README.rst | 8 ++++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 1ce60852..58fcbeee 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c -# created: 2022-07-05T18:31:20.838186805Z + digest: sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 +# created: 2022-07-14T01:58:16.015625351Z diff --git a/README.rst b/README.rst index a258b7d9..46fa4e0c 100644 --- a/README.rst +++ b/README.rst @@ -55,18 +55,22 @@ Code samples and snippets live in the `samples/` folder. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Our client libraries are compatible with all current [active](https://devguide.python.org/devcycle/#in-development-main-branch) and [maintenance](https://devguide.python.org/devcycle/#maintenance-branches) versions of +Our client libraries are compatible with all current `active`_ and `maintenance`_ versions of Python. Python >= 3.7 +.. _active: https://devguide.python.org/devcycle/#in-development-main-branch +.. _maintenance: https://devguide.python.org/devcycle/#maintenance-branches + Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Python <= 3.6 -If you are using an [end-of-life](https://devguide.python.org/devcycle/#end-of-life-branches) +If you are using an `end-of-life`_ version of Python, we recommend that you update as soon as possible to an actively supported version. +.. _end-of-life: https://devguide.python.org/devcycle/#end-of-life-branches Mac/Linux ^^^^^^^^^ From f53a2fa49567035a1a3bb94d13444dd65bd104c6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Jul 2022 15:08:10 +0000 Subject: [PATCH 002/111] fix(deps): require google-api-core>=1.32.0,>=2.8.0 (#354) - [ ] Regenerate this pull request now. 
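The `api_audience` client option added here flows into each transport, where GDCH-style credentials are rewrapped with either the explicit audience or, failing that, the host. Below is a minimal sketch of that behavior, assuming only a credential object that exposes `with_gdch_audience`; the stub class and helper function names are illustrative, not part of the library:

```python
# Illustrative sketch of the audience handling introduced in this change:
# when no explicit api_audience is given, the host is used as the audience
# for credentials that support GDCH audiences. Names here are hypothetical.
from typing import Optional


class _FakeGdchCredentials:
    """Stand-in for a google-auth credential object with GDCH audience support."""

    def __init__(self, audience: Optional[str] = None):
        self.audience = audience

    def with_gdch_audience(self, audience: str) -> "_FakeGdchCredentials":
        # Return a copy bound to the requested audience.
        return _FakeGdchCredentials(audience)


def apply_audience(credentials, host: str, api_audience: Optional[str] = None):
    # Mirrors the transport logic: only credentials exposing
    # with_gdch_audience are rewrapped; others pass through unchanged.
    if hasattr(credentials, "with_gdch_audience"):
        credentials = credentials.with_gdch_audience(
            api_audience if api_audience else host
        )
    return credentials


creds = apply_audience(_FakeGdchCredentials(), "clouderrorreporting.googleapis.com:443")
assert creds.audience == "clouderrorreporting.googleapis.com:443"
```

When `api_audience` is left unset, the host doubles as the audience, which matches the expectations in the unit tests added by this change.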
PiperOrigin-RevId: 459095142 Source-Link: https://github.com/googleapis/googleapis/commit/4f1be992601ed740a581a32cedc4e7b6c6a27793 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ae686d9cde4fc3e36d0ac02efb8643b15890c1ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWU2ODZkOWNkZTRmYzNlMzZkMGFjMDJlZmI4NjQzYjE1ODkwYzFlZCJ9 feat: add audience parameter PiperOrigin-RevId: 456827138 Source-Link: https://github.com/googleapis/googleapis/commit/23f1a157189581734c7a77cddfeb7c5bc1e440ae Source-Link: https://github.com/googleapis/googleapis-gen/commit/4075a8514f676691ec156688a5bbf183aa9893ce Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDA3NWE4NTE0ZjY3NjY5MWVjMTU2Njg4YTViYmYxODNhYTk4OTNjZSJ9 --- .../services/error_group_service/client.py | 1 + .../error_group_service/transports/base.py | 16 ++++-- .../error_group_service/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../services/error_stats_service/client.py | 1 + .../error_stats_service/transports/base.py | 16 ++++-- .../error_stats_service/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + .../services/report_errors_service/client.py | 1 + .../report_errors_service/transports/base.py | 16 ++++-- .../report_errors_service/transports/grpc.py | 2 + .../transports/grpc_asyncio.py | 2 + setup.py | 5 +- testing/constraints-3.6.txt | 11 ---- testing/constraints-3.7.txt | 2 +- .../test_error_group_service.py | 52 +++++++++++++++++++ .../test_error_stats_service.py | 52 +++++++++++++++++++ .../test_report_errors_service.py | 52 +++++++++++++++++++ 18 files changed, 206 insertions(+), 31 deletions(-) delete mode 100644 testing/constraints-3.6.txt diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index eec0a0ae..df10c969 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -419,6 +419,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def get_group( diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py index d7f0edd8..d2ef9e3f 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py @@ -55,6 +55,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -82,11 +83,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -107,6 +103,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -119,6 +120,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index c90e37f7..a4595425 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -154,6 +155,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py index 718fc657..be98ee03 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -199,6 +200,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index f3289f77..85de24ae 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -422,6 +422,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def list_group_stats( diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py index 0bdbe002..d7970fc8 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py @@ -54,6 +54,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. 
@@ -81,11 +82,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -106,6 +102,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -118,6 +119,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index 8b9510b1..c2dbd844 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -154,6 +155,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py index cce5c95c..9fd8c8fd 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py @@ -104,6 +104,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -199,6 +200,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 297c8d8e..0f18a72b 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -401,6 +401,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def report_error_event( diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py index 5da09490..e2ec3343 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py @@ -54,6 +54,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -81,11 +82,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -106,6 +102,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -118,6 +119,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index aab08b85..2c08420c 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -58,6 +58,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -153,6 +154,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index ae617404..def71459 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -103,6 +103,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -198,6 +199,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/setup.py b/setup.py index 6035154f..0dad9912 100644 --- a/setup.py +++ b/setup.py @@ -30,10 +30,7 @@ release_status = "Development Status :: 4 - Beta" dependencies = [ "google-cloud-logging>=1.14.0, <4.0.0dev", - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", ] diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt deleted file mode 100644 index cf48a7f8..00000000 --- a/testing/constraints-3.6.txt +++ /dev/null @@ -1,11 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. 
-# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-cloud-logging==1.14.0 -google-api-core==1.31.5 -proto-plus==1.15.0 -protobuf==3.19.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index cf48a7f8..1309e99a 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -6,6 +6,6 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-cloud-logging==1.14.0 -google-api-core==1.31.5 +google-api-core==1.32.0 proto-plus==1.15.0 protobuf==3.19.0 diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 96f3472f..bc5be3a3 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -233,6 +233,7 @@ def test_error_group_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -250,6 +251,7 @@ def test_error_group_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -267,6 +269,7 @@ def test_error_group_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -296,6 +299,25 @@ def test_error_group_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -373,6 +395,7 @@ def test_error_group_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -407,6 +430,7 @@ def test_error_group_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -429,6 +453,7 @@ def test_error_group_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -543,6 +568,7 @@ def test_error_group_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -581,6 +607,7 @@ def test_error_group_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -601,6 +628,7 @@ def test_error_group_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -639,6 +667,7 @@ def test_error_group_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -1362,6 +1391,28 @@ def test_error_group_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ErrorGroupServiceGrpcTransport, + transports.ErrorGroupServiceGrpcAsyncIOTransport, + ], +) +def test_error_group_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -1832,4 +1883,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index d3d0634b..956aa930 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -236,6 +236,7 @@ def test_error_stats_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -253,6 +254,7 @@ def test_error_stats_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -270,6 +272,7 @@ def test_error_stats_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -299,6 +302,25 @@ def test_error_stats_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, 
always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -376,6 +398,7 @@ def test_error_stats_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -410,6 +433,7 @@ def test_error_stats_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -432,6 +456,7 @@ def test_error_stats_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -546,6 +571,7 @@ def test_error_stats_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -584,6 +610,7 @@ def test_error_stats_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -604,6 +631,7 @@ def test_error_stats_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -642,6 +670,7 @@ def test_error_stats_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -1996,6 +2025,28 @@ def test_error_stats_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ErrorStatsServiceGrpcTransport, + transports.ErrorStatsServiceGrpcAsyncIOTransport, + ], +) +def test_error_stats_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2466,4 +2517,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 18494673..d89e0712 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -240,6 +240,7 @@ def test_report_errors_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -257,6 +258,7 @@ def test_report_errors_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -274,6 +276,7 @@ def test_report_errors_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -303,6 +306,25 @@ def test_report_errors_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -380,6 +402,7 @@ def test_report_errors_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -414,6 +437,7 @@ def test_report_errors_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -436,6 +460,7 @@ def test_report_errors_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -554,6 +579,7 @@ def test_report_errors_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -592,6 +618,7 @@ def test_report_errors_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -612,6 +639,7 @@ def test_report_errors_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -650,6 +678,7 @@ def test_report_errors_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -1161,6 +1190,28 @@ def test_report_errors_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.ReportErrorsServiceGrpcTransport, + transports.ReportErrorsServiceGrpcAsyncIOTransport, + ], +) +def test_report_errors_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -1608,4 +1659,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) From 8094a0ae2dae28e7f6a42d57b79bab9b3d5d07ff Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 14 Jul 2022 11:27:15 -0400 Subject: [PATCH 003/111] chore(main): release 1.6.0 (#359) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 13 +++++++++++++ setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 30c79b90..3a828e48 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.6.0](https://github.com/googleapis/python-error-reporting/compare/v1.5.3...v1.6.0) (2022-07-14) + + +### Features + +* add audience parameter ([f53a2fa](https://github.com/googleapis/python-error-reporting/commit/f53a2fa49567035a1a3bb94d13444dd65bd104c6)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 
([#354](https://github.com/googleapis/python-error-reporting/issues/354)) ([f53a2fa](https://github.com/googleapis/python-error-reporting/commit/f53a2fa49567035a1a3bb94d13444dd65bd104c6)) +* require python 3.7+ ([#358](https://github.com/googleapis/python-error-reporting/issues/358)) ([ab0a9ba](https://github.com/googleapis/python-error-reporting/commit/ab0a9bacf9594ce3ff4c521413a20a2995533032)) + ## [1.5.3](https://github.com/googleapis/python-error-reporting/compare/v1.5.2...v1.5.3) (2022-06-07) diff --git a/setup.py b/setup.py index 0dad9912..b83681ac 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-error-reporting" description = "Error Reporting API client library" -version = "1.5.3" +version = "1.6.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 24606c93f670ea4a9954b077ff055f8e8ddcd057 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 14 Jul 2022 19:08:43 +0200 Subject: [PATCH 004/111] chore(deps): update all dependencies (#351) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- samples/snippets/api/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt index d44c4105..ef06f3a0 100644 --- a/samples/snippets/api/requirements.txt +++ b/samples/snippets/api/requirements.txt @@ -1 +1 @@ -google-cloud-error-reporting==1.5.2 +google-cloud-error-reporting==1.5.3 From 5f170741c22d09ce2e1d763084c04fdfc35abf22 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Jul 2022 22:33:29 -0400 Subject: [PATCH 005/111] chore(python): fix prerelease session [autoapprove] (#362) Source-Link: https://github.com/googleapis/synthtool/commit/1b9ad7694e44ddb4d9844df55ff7af77b51a4435 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 33 ++++++++++++++++++--------------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 58fcbeee..0eb02fda 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c8878270182edaab99f2927969d4f700c3af265accd472c3425deedff2b7fd93 -# created: 2022-07-14T01:58:16.015625351Z + digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 +# created: 2022-07-25T16:02:49.174178716Z diff --git a/noxfile.py b/noxfile.py index 001ec73c..70739b12 100644 --- a/noxfile.py +++ b/noxfile.py @@ -333,7 +333,8 @@ def prerelease_deps(session): # Install all dependencies session.install("-e", ".[all, tests, tracing]") - session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES @@ -362,12 +363,6 @@ def prerelease_deps(session): session.install(*constraints_deps) - if os.path.exists("samples/snippets/requirements.txt"): - session.install("-r", "samples/snippets/requirements.txt") - - if os.path.exists("samples/snippets/requirements-test.txt"): - session.install("-r", "samples/snippets/requirements-test.txt") - prerel_deps = [ "protobuf", # dependency of grpc @@ -404,11 +399,19 @@ def prerelease_deps(session): system_test_folder_path = os.path.join("tests", "system") # Only run system tests if found. - if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): - session.run("py.test", "tests/system") - - snippets_test_path = os.path.join("samples", "snippets") - - # Only run samples tests if found. - if os.path.exists(snippets_test_path): - session.run("py.test", "samples/snippets") + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) From ef3e68245d2a0f3836a5888c88bb4db4b61cec04 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Aug 2022 14:48:39 +0200 Subject: [PATCH 006/111] chore(deps): update all dependencies (#363) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- samples/snippets/api/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt index ef06f3a0..fcddb2d1 100644 --- a/samples/snippets/api/requirements.txt +++ b/samples/snippets/api/requirements.txt @@ -1 +1 @@ -google-cloud-error-reporting==1.5.3 +google-cloud-error-reporting==1.6.0 From 375d133fb2543c665abb4e0d5420c97b26a9c8c3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Aug 2022 21:05:12 -0400 Subject: [PATCH 007/111] chore(deps): update actions/setup-python action to v4 [autoapprove] (#365) Source-Link: https://github.com/googleapis/synthtool/commit/8e55b327bae44b6640c7ab4be91df85fc4d6fe8a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- 
.github/workflows/docs.yml | 4 ++-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0eb02fda..c701359f 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 -# created: 2022-07-25T16:02:49.174178716Z + digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 +# created: 2022-08-09T15:58:56.463048506Z diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index b46d7305..7092a139 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index f512a496..d2aee5b7 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 5531b014..87ade4d5 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install coverage From 9535a289a458badf7406688d3e9a77f0e580d0a8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 11 Aug 2022 19:14:16 -0400 Subject: [PATCH 008/111] fix(deps): allow protobuf < 5.0.0 (#366) fix(deps): require proto-plus >= 1.22.0 --- setup.py | 4 ++-- testing/constraints-3.7.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index b83681ac..0152403a 100644 --- a/setup.py +++ b/setup.py @@ -31,8 +31,8 @@ dependencies = [ "google-cloud-logging>=1.14.0, <4.0.0dev", "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", - "proto-plus >= 1.15.0, <2.0.0dev", - "protobuf >= 3.19.0, <4.0.0dev", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf >= 3.19.0, <5.0.0dev", ] extras = {} diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 1309e99a..74872487 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -7,5 +7,5 @@ # Then this file should have foo==1.14.0 google-cloud-logging==1.14.0 google-api-core==1.32.0 -proto-plus==1.15.0 +proto-plus==1.22.0 protobuf==3.19.0 From 951bd3c2bd8619b0f62678e2dc8b0af599813662 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 12 Aug 2022 10:24:50 -0700 Subject: [PATCH 009/111] chore: added extra variables owlbot 
kokoro configs (#368) --- .kokoro/common_env_vars.cfg | 19 +++++++++++++++++++ .kokoro/continuous/common.cfg | 20 ++++++++++++++++++++ .kokoro/docs/common.cfg | 21 ++++++++++++++++++++- .kokoro/presubmit/common.cfg | 20 ++++++++++++++++++++ .kokoro/release/common.cfg | 20 ++++++++++++++++++++ .kokoro/samples/lint/common.cfg | 21 ++++++++++++++++++++- .kokoro/samples/python3.10/common.cfg | 21 ++++++++++++++++++++- .kokoro/samples/python3.7/common.cfg | 21 ++++++++++++++++++++- .kokoro/samples/python3.8/common.cfg | 21 ++++++++++++++++++++- .kokoro/samples/python3.9/common.cfg | 21 ++++++++++++++++++++- owlbot.py | 18 ++++++++++++++++++ 11 files changed, 217 insertions(+), 6 deletions(-) create mode 100644 .kokoro/common_env_vars.cfg diff --git a/.kokoro/common_env_vars.cfg b/.kokoro/common_env_vars.cfg new file mode 100644 index 00000000..b5c22b80 --- /dev/null +++ b/.kokoro/common_env_vars.cfg @@ -0,0 +1,19 @@ + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg index ccbc23c7..c337b6d8 100644 --- a/.kokoro/continuous/common.cfg +++ b/.kokoro/continuous/common.cfg @@ -25,3 +25,23 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-error-reporting/.kokoro/build.sh" } + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 2603e22d..eade26cf 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -63,4 +63,23 @@ before_action { keyname: "docuploader_service_account" } } -} \ No newline at end of file +} + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg index ccbc23c7..c337b6d8 100644 --- a/.kokoro/presubmit/common.cfg +++ b/.kokoro/presubmit/common.cfg @@ -25,3 +25,23 @@ env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-error-reporting/.kokoro/build.sh" } + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index c4f805b8..5e979115 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -38,3 +38,23 @@ env_vars: { key: "SECRET_MANAGER_KEYS" value: 
"releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg index a38013a4..3f98b562 100644 --- a/.kokoro/samples/lint/common.cfg +++ b/.kokoro/samples/lint/common.cfg @@ -31,4 +31,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg index 2ed420e5..d19172ae 100644 --- a/.kokoro/samples/python3.10/common.cfg +++ b/.kokoro/samples/python3.10/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index 161c018b..ad162db9 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index abd2a5bb..b495960b 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg index 4889669b..01a9e9d3 100644 --- a/.kokoro/samples/python3.9/common.cfg +++ b/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,23 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" \ No newline at end of file +build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/owlbot.py b/owlbot.py index 5bc3f034..b192af9a 100644 --- a/owlbot.py +++ b/owlbot.py @@ -16,6 +16,7 @@ import synthtool as s from synthtool import gcp from synthtool.languages import python +import os common = gcp.CommonTemplates() @@ -45,3 +46,20 @@ s.shell.run(["nox", "-s", "blacken"], hide_output=False) + +# -------------------------------------------------------------------------- +# Modify test configs +# -------------------------------------------------------------------------- + +# add shared environment variables to test configs +tracked_subdirs = ["continuous", "presubmit", "release", "samples", "docs"] +for subdir in tracked_subdirs: + for path, subdirs, files in os.walk(f".kokoro/{subdir}"): + for name in files: + if name == "common.cfg": + file_path = os.path.join(path, name) + s.move( + ".kokoro/common_env_vars.cfg", + file_path, + merge=lambda src, dst, _, : f"{dst}\n{src}", + ) From d512365684f1032be1b5f86a6dc70fa4ff5c1153 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 16 Aug 2022 11:51:58 -0400 Subject: [PATCH 010/111] chore(main): release 1.6.1 (#367) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 8 ++++++++ setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3a828e48..002eac0d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.6.1](https://github.com/googleapis/python-error-reporting/compare/v1.6.0...v1.6.1) (2022-08-12) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#366](https://github.com/googleapis/python-error-reporting/issues/366)) ([9535a28](https://github.com/googleapis/python-error-reporting/commit/9535a289a458badf7406688d3e9a77f0e580d0a8)) +* **deps:** require proto-plus >= 1.22.0 ([9535a28](https://github.com/googleapis/python-error-reporting/commit/9535a289a458badf7406688d3e9a77f0e580d0a8)) + ## [1.6.0](https://github.com/googleapis/python-error-reporting/compare/v1.5.3...v1.6.0) (2022-07-14) diff --git a/setup.py b/setup.py index 0152403a..d5873865 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-error-reporting" description = "Error Reporting API client library" -version = "1.6.0" +version = "1.6.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 42237cd1b3bb05b7b382836e0f9605ed533b9851 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Aug 2022 18:35:00 +0200 Subject: [PATCH 011/111] chore(deps): update dependency google-cloud-error-reporting to v1.6.1 (#371) --- samples/snippets/api/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt index fcddb2d1..083d64d0 100644 --- a/samples/snippets/api/requirements.txt +++ 
b/samples/snippets/api/requirements.txt @@ -1 +1 @@ -google-cloud-error-reporting==1.6.0 +google-cloud-error-reporting==1.6.1 From d364cf9910291923ae8bdb5bfaafeb6d7ab8f2b7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 Aug 2022 15:36:43 -0400 Subject: [PATCH 012/111] chore: remove 'pip install' statements from python_library templates [autoapprove] (#373) Source-Link: https://github.com/googleapis/synthtool/commit/69fabaee9eca28af7ecaa02c86895e606fbbebd6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/publish-docs.sh | 4 +- .kokoro/release.sh | 5 +- .kokoro/requirements.in | 8 + .kokoro/requirements.txt | 464 ++++++++++++++++++++++++++++++++++++++ renovate.json | 2 +- 6 files changed, 477 insertions(+), 10 deletions(-) create mode 100644 .kokoro/requirements.in create mode 100644 .kokoro/requirements.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c701359f..c6acdf3f 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 -# created: 2022-08-09T15:58:56.463048506Z + digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 +# created: 2022-08-24T17:07:22.006876712Z diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 8acb14e8..1c4d6237 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --user --upgrade --quiet nox +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m nox --version # build docs nox -s docs -python3 -m pip install --user gcp-docuploader - # create metadata python3 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 086e7e24..2efe6b7e 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in new file mode 100644 index 00000000..7718391a --- /dev/null +++ b/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt new file mode 100644 index 00000000..c4b824f2 --- /dev/null +++ b/.kokoro/requirements.txt @@ -0,0 +1,464 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + 
--hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via cryptography +charset-normalizer==2.1.1 \ + 
--hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.6.0 \ + --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ + --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e + # via + # gcp-docuploader + # nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage +distlib==0.3.5 \ + --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ + --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv +gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in 
+gcp-releasetool==1.8.6 \ + --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ + --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via gcp-docuploader +google-crc32c==1.3.0 \ + --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + 
--hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.8.2 \ + --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ + --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + 
--hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine +platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.1 \ + 
--hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ + --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ + --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ + --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ + --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ + --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ + --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ + --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ + --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ + --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ + --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ + --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ + --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ + --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ + --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ + --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ + --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ + --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ + --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ + --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ + --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ + --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ + --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ + --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + 
--hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.3 \ + --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ + --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 + # via nox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are 
considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in diff --git a/renovate.json b/renovate.json index c21036d3..566a70f3 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From 9ebb3ef782dda0bec5b0a78c943bc3bb907ef1c0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 29 Aug 2022 15:22:09 -0400 Subject: [PATCH 013/111] chore(python): exclude `grpcio==1.49.0rc1` in tests (#374) Source-Link: https://github.com/googleapis/synthtool/commit/c4dd5953003d13b239f872d329c3146586bb417e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 6 +++--- noxfile.py | 7 +++++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c6acdf3f..23e106b6 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 -# created: 2022-08-24T17:07:22.006876712Z + digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 +# created: 2022-08-29T17:28:30.441852797Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index c4b824f2..4b29ef24 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -136,9 +136,9 @@ cryptography==37.0.4 \ # via # gcp-releasetool # secretstorage -distlib==0.3.5 \ - --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ - --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e # via virtualenv docutils==0.19 \ --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ diff --git a/noxfile.py b/noxfile.py index 70739b12..5f898a7a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -188,7 +188,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") + # Exclude version 1.49.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/pull/30642 + session.install("--pre", "grpcio!=1.49.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -368,7 +370,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - "grpcio", + # Exclude version 1.49.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/pull/30642 + "grpcio!=1.49.0rc1", "grpcio-status", "google-api-core", "proto-plus", From b986098ef504d5e3307196b09e9b1e3c0449e87a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 18:54:13 +0000 Subject: [PATCH 014/111] ci(python): fix path to requirements.txt in release script (#375) Source-Link: https://github.com/googleapis/synthtool/commit/fdba3ed145bdb2f4f3eff434d4284b1d03b80d34 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 --- .github/.OwlBot.lock.yaml | 3 +-- .kokoro/release.sh | 2 +- .kokoro/requirements.txt | 24 ++++++++++++------------ 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 23e106b6..0d9eb2af 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 -# created: 2022-08-29T17:28:30.441852797Z + digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 2efe6b7e..4a5ad17a 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -16,7 +16,7 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install --require-hashes -r .kokoro/requirements.txt +python3 -m pip install --require-hashes -r github/python-error-reporting/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script # Disable buffering, so that the logs stream through. 
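A note on the pinned-requirements workflow these Kokoro changes establish: the release and docs scripts now install their tooling (gcp-releasetool, gcp-docuploader, nox, twine, and friends) only from the hash-pinned .kokoro/requirements.txt, which pip-compile generates from .kokoro/requirements.in. A minimal sketch of how the pinned file is refreshed and then consumed, assuming pip-tools is installed and the first command is run from the .kokoro directory (the exact invocation is recorded in the generated file's header); this is an illustration, not part of any commit above:

    # Regenerate the hashed pin file from requirements.in
    pip-compile --allow-unsafe --generate-hashes requirements.in
    # CI then installs with hash verification, as the scripts above do
    python3 -m pip install --require-hashes -r .kokoro/requirements.txt

Because every dependency is pinned with --hash entries, pip refuses to install anything whose digest does not match, which is why renovate.json is updated to ignore .kokoro/requirements.txt rather than bump it directly.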
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 4b29ef24..92b2f727 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -100,9 +100,9 @@ click==8.0.4 \ # via # gcp-docuploader # gcp-releasetool -colorlog==6.6.0 \ - --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ - --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 # via # gcp-docuploader # nox @@ -152,9 +152,9 @@ gcp-docuploader==0.6.3 \ --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b # via -r requirements.in -gcp-releasetool==1.8.6 \ - --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ - --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d # via -r requirements.in google-api-core==2.8.2 \ --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ @@ -251,9 +251,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.8.2 \ - --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ - --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db # via # gcp-releasetool # twine @@ -440,9 +440,9 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.3 \ - --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ - --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ From e2beb5825ec01e67b3b6bffc13b4ff5a6933d715 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 00:52:28 +0000 Subject: [PATCH 015/111] chore(python): update .kokoro/requirements.txt (#376) Source-Link: https://github.com/googleapis/synthtool/commit/703554a14c7479542335b62fa69279f93a9e38ec Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0d9eb2af..2fa0f7c4 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 + digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 92b2f727..385f2d4d 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -241,6 +241,10 @@ importlib-metadata==4.12.0 \ # via # -r requirements.in # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 @@ -299,6 +303,10 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c From dd3b10058fbcaf7187ac462da72ba12909afad39 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 18:40:28 +0000 Subject: [PATCH 016/111] chore(python): exclude setup.py in renovate config (#380) Source-Link: https://github.com/googleapis/synthtool/commit/56da63e80c384a871356d1ea6640802017f213b4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 --- .github/.OwlBot.lock.yaml | 2 +- renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 2fa0f7c4..b8dcb4a4 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b + digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 diff --git a/renovate.json b/renovate.json index 566a70f3..39b2a0ec 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From 4fd9627027b90d3d054408faccc1e52d9ba34f84 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Sep 2022 17:41:08 +0200 Subject: [PATCH 017/111] chore(deps): update dependency pytest to v7.1.3 (#381) --- samples/snippets/api/requirements-test.txt | 2 +- samples/snippets/fluent_on_compute/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/api/requirements-test.txt b/samples/snippets/api/requirements-test.txt index d00689e0..e0716850 100644 --- a/samples/snippets/api/requirements-test.txt +++ b/samples/snippets/api/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.2 +pytest==7.1.3 diff --git a/samples/snippets/fluent_on_compute/requirements-test.txt b/samples/snippets/fluent_on_compute/requirements-test.txt index fb466e50..6f722c66 100644 --- a/samples/snippets/fluent_on_compute/requirements-test.txt +++ b/samples/snippets/fluent_on_compute/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.1.2 +pytest==7.1.3 mock==4.0.3 From 65234790abebb31b6ed1e5a87449897822ac4529 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 7 Sep 2022 14:20:13 +0000 Subject: [PATCH 018/111] chore: Bump gapic-generator-python version to 1.3.0 (#382) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 472561635 Source-Link: https://github.com/googleapis/googleapis/commit/332ecf599f8e747d8d1213b77ae7db26eff12814 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4313d682880fd9d7247291164d4e9d3d5bd9f177 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDMxM2Q2ODI4ODBmZDlkNzI0NzI5MTE2NGQ0ZTlkM2Q1YmQ5ZjE3NyJ9 --- .../error_group_service/async_client.py | 14 + .../services/error_group_service/client.py | 14 + .../error_stats_service/async_client.py | 21 ++ .../services/error_stats_service/client.py | 21 ++ .../report_errors_service/async_client.py | 7 + .../services/report_errors_service/client.py | 7 + mypy.ini | 2 +- ...ted_error_group_service_get_group_async.py | 7 + ...ated_error_group_service_get_group_sync.py | 7 + ..._error_group_service_update_group_async.py | 7 + ...d_error_group_service_update_group_sync.py | 7 + ...error_stats_service_delete_events_async.py | 7 + ..._error_stats_service_delete_events_sync.py | 7 + ...d_error_stats_service_list_events_async.py | 7 + ...ed_error_stats_service_list_events_sync.py | 7 + ...or_stats_service_list_group_stats_async.py | 7 + ...ror_stats_service_list_group_stats_sync.py | 7 + ...errors_service_report_error_event_async.py | 7 + ..._errors_service_report_error_event_sync.py | 7 + ...ippet_metadata_errorreporting_v1beta1.json | 240 +++++++++--------- 20 files changed, 289 insertions(+), 121 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index 0a03d9a3..8af3cd82 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -217,6 +217,13 @@ async def get_group( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 async def sample_get_group(): @@ -319,6 +326,13 @@ async def update_group( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 async def sample_update_group(): diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index df10c969..86ae1120 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -435,6 +435,13 @@ def get_group( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 def sample_get_group(): @@ -537,6 +544,13 @@ def update_group( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 def sample_update_group(): diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 03095875..88b31900 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -221,6 +221,13 @@ async def list_group_stats( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 async def sample_list_group_stats(): @@ -353,6 +360,13 @@ async def list_events( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 async def sample_list_events(): @@ -477,6 +491,13 @@ async def delete_events( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 async def sample_delete_events(): diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 85de24ae..77690989 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -439,6 +439,13 @@ def list_group_stats( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 def sample_list_group_stats(): @@ -571,6 +578,13 @@ def list_events( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 def sample_list_events(): @@ -695,6 +709,13 @@ def delete_events( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 def sample_delete_events(): diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index a8f85c33..ae912ecd 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -229,6 +229,13 @@ async def report_error_event( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 async def sample_report_error_event(): diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 0f18a72b..9d6f734a 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -433,6 +433,13 @@ def report_error_event( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 def sample_report_error_event(): diff --git a/mypy.ini b/mypy.ini index 4505b485..574c5aed 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py index 7a6e06fc..7b49e8b8 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_GetGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py index 9c8ac42b..28d64428 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_GetGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py index 4581e274..f0a68ab3 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_UpdateGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py index 67cb2a40..b021265a 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_UpdateGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py index e8f462be..e30df345 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_DeleteEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py index 75da055a..0b16d477 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_DeleteEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py index 8e3bd85f..9d25b821 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListEvents_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py index 9bd122f5..19cbf488 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListEvents_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py index d6176174..25ae2ff0 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListGroupStats_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py index 02053bde..0d779b46 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListGroupStats_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py index 333e710b..f836815d 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ReportErrorsService_ReportErrorEvent_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py index 4ffd5a4f..575f57aa 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py @@ -24,6 +24,13 @@ # [START clouderrorreporting_v1beta1_generated_ReportErrorsService_ReportErrorEvent_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google.cloud import errorreporting_v1beta1 diff --git a/samples/generated_samples/snippet_metadata_errorreporting_v1beta1.json b/samples/generated_samples/snippet_metadata_errorreporting_v1beta1.json index 6b5f5926..4d088480 100644 --- a/samples/generated_samples/snippet_metadata_errorreporting_v1beta1.json +++ b/samples/generated_samples/snippet_metadata_errorreporting_v1beta1.json @@ -59,33 +59,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorGroupService_GetGroup_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -139,33 +139,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorGroupService_GetGroup_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -220,33 +220,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorGroupService_UpdateGroup_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -300,33 +300,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorGroupService_UpdateGroup_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -381,33 +381,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorStatsService_DeleteEvents_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 
42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -461,33 +461,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorStatsService_DeleteEvents_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -546,33 +546,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorStatsService_ListEvents_async", "segments": [ { - "end": 46, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 53, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 47, - "start": 43, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -630,33 +630,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorStatsService_ListEvents_sync", "segments": [ { - "end": 46, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 46, + "end": 53, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 47, - "start": 43, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -715,33 +715,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorStatsService_ListGroupStats_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -799,33 +799,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ErrorStatsService_ListGroupStats_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -884,33 +884,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ReportErrorsService_ReportErrorEvent_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -968,33 +968,33 @@ "regionTag": "clouderrorreporting_v1beta1_generated_ReportErrorsService_ReportErrorEvent_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], From 3a662166f7eaad13092d042f7896bb7932656030 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Sep 2022 13:50:29 +0000 Subject: [PATCH 019/111] chore: use gapic-generator-python 1.3.1 (#383) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 472772457 Source-Link: https://github.com/googleapis/googleapis/commit/855b74d203deeb0f7a0215f9454cdde62a1f9b86 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b64b1e7da3e138f15ca361552ef0545e54891b4f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjY0YjFlN2RhM2UxMzhmMTVjYTM2MTU1MmVmMDU0NWU1NDg5MWI0ZiJ9 --- .../gapic/errorreporting_v1beta1/test_error_group_service.py | 4 ++-- .../gapic/errorreporting_v1beta1/test_error_stats_service.py | 4 ++-- .../errorreporting_v1beta1/test_report_errors_service.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index bc5be3a3..0594bf3f 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 956aa930..a6998858 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index d89e0712..1271610a 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO 
COVER import mock import grpc From 044800b429cdfbb9f156b0853d500b629d4bf715 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 14:38:14 +0000 Subject: [PATCH 020/111] chore: use gapic generator python 1.4.1 (#384) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 473833416 Source-Link: https://github.com/googleapis/googleapis/commit/565a5508869557a3228b871101e4e4ebd8f93d11 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ee1a06c6de3ca8b843572c1fde0548f84236989 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWVlMWEwNmM2ZGUzY2E4Yjg0MzU3MmMxZmRlMDU0OGY4NDIzNjk4OSJ9 --- .../gapic/errorreporting_v1beta1/test_error_group_service.py | 2 +- .../gapic/errorreporting_v1beta1/test_error_stats_service.py | 2 +- .../gapic/errorreporting_v1beta1/test_report_errors_service.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 0594bf3f..994c484c 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index a6998858..c5fbcf64 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 1271610a..85eb3987 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions From 9af6b8cb0d6ba04fb5b8a7d4a0fe4c56e2c0c48d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 16:14:29 +0000 Subject: [PATCH 021/111] chore: detect samples tests in nested directories (#385) Source-Link: https://github.com/googleapis/synthtool/commit/50db768f450a50d7c1fd62513c113c9bb96fd434 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 --- .github/.OwlBot.lock.yaml | 2 +- samples/snippets/api/noxfile.py | 4 ++-- samples/snippets/fluent_on_compute/noxfile.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b8dcb4a4..aa547962 100644 --- a/.github/.OwlBot.lock.yaml +++ 
b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 + digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 diff --git a/samples/snippets/api/noxfile.py b/samples/snippets/api/noxfile.py index 5fcb9d74..0398d72f 100644 --- a/samples/snippets/api/noxfile.py +++ b/samples/snippets/api/noxfile.py @@ -207,8 +207,8 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") diff --git a/samples/snippets/fluent_on_compute/noxfile.py b/samples/snippets/fluent_on_compute/noxfile.py index 5fcb9d74..0398d72f 100644 --- a/samples/snippets/fluent_on_compute/noxfile.py +++ b/samples/snippets/fluent_on_compute/noxfile.py @@ -207,8 +207,8 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") From adde212c5c37ecbfac9a7ccda9e1fa027c670e52 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 12:15:03 -0400 Subject: [PATCH 022/111] fix(deps): require protobuf >= 3.20.2 (#388) * chore: exclude requirements.txt file from renovate-bot Source-Link: https://github.com/googleapis/synthtool/commit/f58d3135a2fab20e225d98741dbc06d57459b816 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 * update constraints files * fix(deps): require protobuf 3.20.2 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 49 ++++++++++++++++++------------------- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 4 files changed, 27 insertions(+), 28 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index aa547962..3815c983 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 + digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 385f2d4d..d15994ba 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -325,31 +325,30 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + 
--hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 # via # gcp-docuploader # gcp-releasetool diff --git a/setup.py b/setup.py index d5873865..56a1f5f6 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ "google-cloud-logging>=1.14.0, <4.0.0dev", "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.19.0, <5.0.0dev", + "protobuf >= 3.20.2, <5.0.0dev", ] extras = {} diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 74872487..0c033d54 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-cloud-logging==1.14.0 google-api-core==1.32.0 proto-plus==1.22.0 -protobuf==3.19.0 +protobuf==3.20.2 From 308ba20061e503897abc9fec9d11edd3bbece623 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 20:17:37 -0400 Subject: [PATCH 023/111] chore(main): release 1.6.2 (#389) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 002eac0d..5edbdb74 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.6.2](https://github.com/googleapis/python-error-reporting/compare/v1.6.1...v1.6.2) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#388](https://github.com/googleapis/python-error-reporting/issues/388)) ([adde212](https://github.com/googleapis/python-error-reporting/commit/adde212c5c37ecbfac9a7ccda9e1fa027c670e52)) + ## [1.6.1](https://github.com/googleapis/python-error-reporting/compare/v1.6.0...v1.6.1) (2022-08-12) diff --git a/setup.py b/setup.py index 56a1f5f6..7b360e05 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-error-reporting" description = "Error Reporting API client library" -version = "1.6.1" +version = "1.6.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 07aa0a80492605e74aa67480956c377bd6ad807d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 4 Oct 2022 15:35:06 +0200 Subject: [PATCH 024/111] chore(deps): update dependency google-cloud-error-reporting to v1.6.2 (#390) --- samples/snippets/api/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt index 083d64d0..21654ae5 100644 --- a/samples/snippets/api/requirements.txt +++ b/samples/snippets/api/requirements.txt @@ -1 +1 @@ -google-cloud-error-reporting==1.6.1 +google-cloud-error-reporting==1.6.2 From 6a42c056a7535f4c43d7698525e19df655c91092 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 7 Oct 2022 16:46:49 -0400 Subject: [PATCH 025/111] fix(deps): allow protobuf 3.19.5 (#391) * fix(deps): allow protobuf 3.19.5 * explicitly exclude protobuf 4.21.0 --- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 7b360e05..f19e35ed 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ "google-cloud-logging>=1.14.0, <4.0.0dev", "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.20.2, <5.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] extras = {} diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 0c033d54..c4e51385 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -8,4 +8,4 @@ google-cloud-logging==1.14.0 google-api-core==1.32.0 proto-plus==1.22.0 -protobuf==3.20.2 +protobuf==3.19.5 From 68070e17c47b062e1b7d307a09ce953d5dc5a086 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 10 Oct 2022 12:19:47 -0400 Subject: [PATCH 026/111] chore(main): release 1.6.3 (#392) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 7 +++++++ setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5edbdb74..945f6fb4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.6.3](https://github.com/googleapis/python-error-reporting/compare/v1.6.2...v1.6.3) (2022-10-07) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#391](https://github.com/googleapis/python-error-reporting/issues/391)) ([6a42c05](https://github.com/googleapis/python-error-reporting/commit/6a42c056a7535f4c43d7698525e19df655c91092)) + ## [1.6.2](https://github.com/googleapis/python-error-reporting/compare/v1.6.1...v1.6.2) (2022-10-03) diff --git a/setup.py b/setup.py index f19e35ed..23a8f2ef 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-error-reporting" description = "Error Reporting API client library" -version = "1.6.2" +version = "1.6.3" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From b7b9340d6e409c88b9bb6b244584c6ba8dd65662 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 10 Oct 2022 19:57:27 +0200 Subject: [PATCH 027/111] chore(deps): update dependency google-cloud-error-reporting to v1.6.3 (#393) --- samples/snippets/api/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt index 21654ae5..fd18a2fa 100644 --- a/samples/snippets/api/requirements.txt +++ b/samples/snippets/api/requirements.txt @@ -1 +1 @@ -google-cloud-error-reporting==1.6.2 +google-cloud-error-reporting==1.6.3 From d9f3587e441f1878455774e14a8ab797faf9a136 Mon Sep 17 00:00:00 2001 From: Daniel Sanche 
Date: Wed, 12 Oct 2022 11:53:42 -0700 Subject: [PATCH 028/111] chore: update blunderbuss asignments (#394) --- .github/blunderbuss.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml index 148ebf4e..a9d3f44e 100644 --- a/.github/blunderbuss.yml +++ b/.github/blunderbuss.yml @@ -1,4 +1,4 @@ assign_issues: - - Daniel-Sanche + - googleapis/api-logging-reviewers assign_prs: - - Daniel-Sanche + - googleapis/api-logging-reviewers From b33396e90566abbe1d905ac6b68bba187d66973b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Oct 2022 12:55:19 +0200 Subject: [PATCH 029/111] chore(deps): update dependency pytest to v7.2.0 (#395) --- samples/snippets/api/requirements-test.txt | 2 +- samples/snippets/fluent_on_compute/requirements-test.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/api/requirements-test.txt b/samples/snippets/api/requirements-test.txt index e0716850..49780e03 100644 --- a/samples/snippets/api/requirements-test.txt +++ b/samples/snippets/api/requirements-test.txt @@ -1 +1 @@ -pytest==7.1.3 +pytest==7.2.0 diff --git a/samples/snippets/fluent_on_compute/requirements-test.txt b/samples/snippets/fluent_on_compute/requirements-test.txt index 6f722c66..980c425b 100644 --- a/samples/snippets/fluent_on_compute/requirements-test.txt +++ b/samples/snippets/fluent_on_compute/requirements-test.txt @@ -1,2 +1,2 @@ -pytest==7.1.3 +pytest==7.2.0 mock==4.0.3 From aafbb1316276016d8465eb628fe7e61b9cf18d7f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 09:35:03 -0800 Subject: [PATCH 030/111] chore(python): update dependencies in .kokoro/requirements.txt (#397) Source-Link: https://github.com/googleapis/synthtool/commit/e3a1277ac35fc88c09db1930533e24292b132ced Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 325 +++++++++++++++++++++----------------- noxfile.py | 11 +- 3 files changed, 187 insertions(+), 151 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3815c983..12edee77 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index d15994ba..31425f16 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -110,29 +110,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + 
--hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +152,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.9.1 \ + --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ + --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.0 \ + 
--hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ + --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d # via # gcp-releasetool # google-api-core @@ -178,72 +182,97 @@ google-cloud-storage==2.5.0 \ --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - 
--hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + 
--hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage googleapis-common-protos==1.56.4 \ --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + 
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +284,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.9.3 \ + --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ + --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 # via # gcp-releasetool # twine @@ -303,9 +332,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -325,34 +354,34 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - 
--hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -377,9 +406,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + 
--hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +421,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +434,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +466,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,9 +476,9 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.6 \ + --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ + --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ @@ -459,13 +488,13 @@ wheel==0.37.1 \ --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 # via -r 
requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.0 \ + --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ + --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 # via -r requirements.in diff --git a/noxfile.py b/noxfile.py index 5f898a7a..35f3976c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -277,7 +277,11 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -300,7 +304,10 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From f9aeea3ce601bada865f73a8b4bc06fb3d9c71e6 Mon Sep 17 00:00:00 2001 From: Sampath Kumar Date: Fri, 18 Nov 2022 17:41:25 +0100 Subject: [PATCH 031/111] chore: code clean up for samples are migrated to python-docs-samples (#400) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: code clean Manually written code samples are migrated to GoogleCloudPlatform/python-docs-samples For more detail, please refer to b/257073450 * Update and rename CONTRIBUTING.md to README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update README.md * Rename samples/README.md to samples/snippets/README.md Co-authored-by: Owl Bot --- samples/snippets/README.md | 4 + samples/snippets/api/README.rst | 98 ------ samples/snippets/api/README.rst.in | 21 -- samples/snippets/api/noxfile.py | 312 ------------------ samples/snippets/api/report_exception.py | 46 --- samples/snippets/api/report_exception_test.py | 23 -- samples/snippets/api/requirements-test.txt | 1 - samples/snippets/api/requirements.txt | 1 - samples/snippets/fluent_on_compute/README.md | 35 -- samples/snippets/fluent_on_compute/main.py | 42 --- .../snippets/fluent_on_compute/main_test.py | 23 -- samples/snippets/fluent_on_compute/noxfile.py | 312 ------------------ .../fluent_on_compute/requirements-test.txt | 2 - .../fluent_on_compute/requirements.txt | 1 - .../fluent_on_compute/startup_script.sh | 35 -- 15 files changed, 4 insertions(+), 952 deletions(-) create mode 100644 samples/snippets/README.md delete mode 100644 samples/snippets/api/README.rst delete mode 100644 samples/snippets/api/README.rst.in delete mode 100644 samples/snippets/api/noxfile.py delete mode 100644 samples/snippets/api/report_exception.py delete mode 100644 samples/snippets/api/report_exception_test.py delete mode 100644 samples/snippets/api/requirements-test.txt delete 
mode 100644 samples/snippets/api/requirements.txt delete mode 100644 samples/snippets/fluent_on_compute/README.md delete mode 100644 samples/snippets/fluent_on_compute/main.py delete mode 100644 samples/snippets/fluent_on_compute/main_test.py delete mode 100644 samples/snippets/fluent_on_compute/noxfile.py delete mode 100644 samples/snippets/fluent_on_compute/requirements-test.txt delete mode 100644 samples/snippets/fluent_on_compute/requirements.txt delete mode 100644 samples/snippets/fluent_on_compute/startup_script.sh diff --git a/samples/snippets/README.md b/samples/snippets/README.md new file mode 100644 index 00000000..b8c6e222 --- /dev/null +++ b/samples/snippets/README.md @@ -0,0 +1,4 @@ +Samples migrated +================ + +New location: https://github.com/GoogleCloudPlatform/python-docs-samples/tree/main/error_reporting diff --git a/samples/snippets/api/README.rst b/samples/snippets/api/README.rst deleted file mode 100644 index fe98a482..00000000 --- a/samples/snippets/api/README.rst +++ /dev/null @@ -1,98 +0,0 @@ -.. This file is automatically generated. Do not edit this file directly. - -Error Reporting Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=error_reporting/api/README.rst - - -This directory contains samples for Error Reporting. `Error Reporting`_ aggregates and displays errors produced in - your running cloud services. - - - - -.. _Error Reporting: https://cloud.google.com/error-reporting/docs/ - -Setup -------------------------------------------------------------------------------- - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - -Samples -------------------------------------------------------------------------------- - -Report Exception -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=error_reporting/api/report_exception.py,error_reporting/api/README.rst - - - - -To run this sample: - -.. 
code-block:: bash - - $ python report_exception.py - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/samples/snippets/api/README.rst.in b/samples/snippets/api/README.rst.in deleted file mode 100644 index 56f4080f..00000000 --- a/samples/snippets/api/README.rst.in +++ /dev/null @@ -1,21 +0,0 @@ -# This file is used to generate README.rst - -product: - name: Error Reporting - short_name: Error Reporting - url: https://cloud.google.com/error-reporting/docs/ - description: > - `Error Reporting`_ aggregates and displays errors produced in - your running cloud services. - -setup: -- auth -- install_deps - -samples: -- name: Report Exception - file: report_exception.py - -cloud_client_library: true - -folder: error_reporting/api \ No newline at end of file diff --git a/samples/snippets/api/noxfile.py b/samples/snippets/api/noxfile.py deleted file mode 100644 index 0398d72f..00000000 --- a/samples/snippets/api/noxfile.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. 
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. 
Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) - elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/snippets/api/report_exception.py b/samples/snippets/api/report_exception.py deleted file mode 100644 index 2b7e8f06..00000000 --- a/samples/snippets/api/report_exception.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2016 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# [START error_reporting] -# [START error_reporting_quickstart] -# [START error_reporting_setup_python] -def simulate_error(): - from google.cloud import error_reporting - - client = error_reporting.Client() - try: - # simulate calling a method that's not defined - raise NameError - except Exception: - client.report_exception() -# [END error_reporting_setup_python] -# [END error_reporting_quickstart] -# [END error_reporting] - - -# [START error_reporting_manual] -# [START error_reporting_setup_python_manual] -def report_manual_error(): - from google.cloud import error_reporting - - client = error_reporting.Client() - client.report("An error has occurred.") -# [START error_reporting_setup_python_manual] -# [END error_reporting_manual] - - -if __name__ == '__main__': - simulate_error() - report_manual_error() diff --git a/samples/snippets/api/report_exception_test.py b/samples/snippets/api/report_exception_test.py deleted file mode 100644 index 042951e9..00000000 --- a/samples/snippets/api/report_exception_test.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2016 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import report_exception - - -def test_error_sends(): - report_exception.simulate_error() - - -def test_manual_error_sends(): - report_exception.report_manual_error() diff --git a/samples/snippets/api/requirements-test.txt b/samples/snippets/api/requirements-test.txt deleted file mode 100644 index 49780e03..00000000 --- a/samples/snippets/api/requirements-test.txt +++ /dev/null @@ -1 +0,0 @@ -pytest==7.2.0 diff --git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt deleted file mode 100644 index fd18a2fa..00000000 --- a/samples/snippets/api/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -google-cloud-error-reporting==1.6.3 diff --git a/samples/snippets/fluent_on_compute/README.md b/samples/snippets/fluent_on_compute/README.md deleted file mode 100644 index d3a58c16..00000000 --- a/samples/snippets/fluent_on_compute/README.md +++ /dev/null @@ -1,35 +0,0 @@ -# Google Error Reorting Samples Samples - -[![Open in Cloud Shell][shell_img]][shell_link] - -[shell_img]: http://gstatic.com/cloudssh/images/open-btn.png -[shell_link]: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=error_reporting/fluent_on_compute/README.md - -This section contains samples for [Google Cloud Error Reporting](https://cloud.google.com/error-reporting). - -A startup script has been provided to demonstrated how to properly provision a GCE -instance with fluentd configured. Note the intallation of fluentd, the addition of the config file, - and the restarting of the fluetnd service. You can start an instance using -it like this: - - gcloud compute instances create example-instance --metadata-from-file startup-script=startup_script.sh - -or simply use it as reference when creating your own instance. - -After fluentd is configured, main.py could be used to simulate an error: - - gcloud compute copy-files main.py example-instance:~/main.py - -Then, - - gcloud compute ssh example-instance - python ~/main.py - -And you will see the message in the Errors Console. - - -These samples are used on the following documentation page: - -> https://cloud.google.com/error-reporting/docs/setting-up-on-compute-engine - - diff --git a/samples/snippets/fluent_on_compute/main.py b/samples/snippets/fluent_on_compute/main.py deleted file mode 100644 index 45208c91..00000000 --- a/samples/snippets/fluent_on_compute/main.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2016 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# [START error_reporting] -import traceback - -import fluent.event -import fluent.sender - - -def simulate_error(): - fluent.sender.setup('myapp', host='localhost', port=24224) - - def report(ex): - data = {} - data['message'] = '{0}'.format(ex) - data['serviceContext'] = {'service': 'myapp'} - # ... 
add more metadata - fluent.event.Event('errors', data) - - # report exception data using: - try: - # simulate calling a method that's not defined - raise NameError - except Exception: - report(traceback.format_exc()) -# [END error_reporting] - - -if __name__ == '__main__': - simulate_error() diff --git a/samples/snippets/fluent_on_compute/main_test.py b/samples/snippets/fluent_on_compute/main_test.py deleted file mode 100644 index 11a24d03..00000000 --- a/samples/snippets/fluent_on_compute/main_test.py +++ /dev/null @@ -1,23 +0,0 @@ -# Copyright 2016 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import mock - -import main - - -@mock.patch("fluent.event") -def test_error_sends(event_mock): - main.simulate_error() - event_mock.Event.assert_called_once_with(mock.ANY, mock.ANY) diff --git a/samples/snippets/fluent_on_compute/noxfile.py b/samples/snippets/fluent_on_compute/noxfile.py deleted file mode 100644 index 0398d72f..00000000 --- a/samples/snippets/fluent_on_compute/noxfile.py +++ /dev/null @@ -1,312 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import glob -import os -from pathlib import Path -import sys -from typing import Callable, Dict, List, Optional - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" - -# Copy `noxfile_config.py` to your directory and modify it instead. - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - "ignored_versions": [], - # Old samples are opted out of enforcing Python type hints - # All new samples should feature them - "enforce_type_hints": False, - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. 
- "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # If you need to use a specific version of pip, - # change pip_version_override to the string representation - # of the version number, for example, "20.2.4" - "pip_version_override": None, - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - "envs": {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars() -> Dict[str, str]: - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] - # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( - "True", - "true", -) - -# Error if a python version is missing -nox.options.error_on_missing_interpreters = True - -# -# Style Checks -# - - -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") - else: - session.install("flake8", "flake8-import-order", "flake8-annotations") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - ".", - ] - session.run("flake8", *args) - - -# -# Black -# - - -@nox.session -def blacken(session: nox.sessions.Session) -> None: - """Run black. 
Format code to uniform standard.""" - session.install(BLACK_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - session.run("black", *python_files) - - -# -# format = isort + black -# - -@nox.session -def format(session: nox.sessions.Session) -> None: - """ - Run isort to sort imports. Then run black - to format code to uniform standard. - """ - session.install(BLACK_VERSION, ISORT_VERSION) - python_files = [path for path in os.listdir(".") if path.endswith(".py")] - - # Use the --fss option to sort imports using strict alphabetical order. - # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections - session.run("isort", "--fss", *python_files) - session.run("black", *python_files) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests( - session: nox.sessions.Session, post_install: Callable = None -) -> None: - # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) - test_list.extend(glob.glob("**/tests", recursive=True)) - - if len(test_list) == 0: - print("No tests found, skipping directory.") - return - - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - concurrent_args = [] - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - with open("requirements.txt") as rfile: - packages = rfile.read() - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - with open("requirements-test.txt") as rtfile: - packages += rtfile.read() - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) - elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session: nox.sessions.Session) -> None: - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) - - -# -# Readmegen -# - - -def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
- p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - # .git is not available in repos cloned via Cloud Build - # setup.py is always in the library's root, so use that instead - # https://github.com/googleapis/synthtool/issues/792 - if Path(p / "setup.py").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session: nox.sessions.Session, path: str) -> None: - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/samples/snippets/fluent_on_compute/requirements-test.txt b/samples/snippets/fluent_on_compute/requirements-test.txt deleted file mode 100644 index 980c425b..00000000 --- a/samples/snippets/fluent_on_compute/requirements-test.txt +++ /dev/null @@ -1,2 +0,0 @@ -pytest==7.2.0 -mock==4.0.3 diff --git a/samples/snippets/fluent_on_compute/requirements.txt b/samples/snippets/fluent_on_compute/requirements.txt deleted file mode 100644 index 693841f6..00000000 --- a/samples/snippets/fluent_on_compute/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -fluent-logger==0.10.0 diff --git a/samples/snippets/fluent_on_compute/startup_script.sh b/samples/snippets/fluent_on_compute/startup_script.sh deleted file mode 100644 index f2ef895d..00000000 --- a/samples/snippets/fluent_on_compute/startup_script.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2016 Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -v - -curl -sSO "https://dl.google.com/cloudagents/install-logging-agent.sh" -chmod +x install-logging-agent.sh -./install-logging-agent.sh -mkdir -p /etc/google-fluentd/config.d/ -cat < /etc/google-fluentd/config.d/forward.conf - - type forward - port 24224 - -EOF -service google-fluentd restart - -apt-get update -apt-get install -yq \ - git build-essential supervisor python python-dev python-pip libffi-dev \ - libssl-dev -pip install fluent-logger - From 38ca25d3f3d897e02f47c4dbca5f4411aedfc91f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 19 Nov 2022 11:32:03 -0500 Subject: [PATCH 032/111] chore(python): update release script dependencies (#399) Source-Link: https://github.com/googleapis/synthtool/commit/25083af347468dd5f90f69627420f7d452b6c50e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 2 +- .github/workflows/docs.yml | 4 +-- .github/workflows/lint.yml | 2 +- .github/workflows/unittest.yml | 2 +- .kokoro/docker/docs/Dockerfile | 12 +++---- .kokoro/requirements.in | 4 ++- .kokoro/requirements.txt | 61 ++++++++++++++++++---------------- noxfile.py | 4 +-- 8 files changed, 48 insertions(+), 43 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 12edee77..3f1ccc08 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 + digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 7092a139..e97d89e4 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index d2aee5b7..16d5a9e9 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 87ade4d5..23000c05 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -41,7 +41,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 238b87b9..f8137d0a 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 
+###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 7718391a..cbd7e77f 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 31425f16..9c1b9be3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -156,9 +159,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.9.1 \ - --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ - --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ @@ -166,9 +169,9 @@ google-api-core==2.10.2 \ # via # google-cloud-core # google-cloud-storage -google-auth==2.14.0 \ - --hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ - --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -178,9 +181,9 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -256,9 +259,9 @@ 
google-resumable-media==2.4.0 \ --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ @@ -269,6 +272,7 @@ importlib-metadata==5.0.0 \ --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine jaraco-classes==3.2.3 \ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ @@ -284,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.3 \ - --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ - --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 +keyring==23.11.0 \ + --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -350,9 +354,9 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -381,7 +385,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core - # googleapis-common-protos py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -476,17 +479,17 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.6 \ - --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ - --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + 
--hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in zipp==3.10.0 \ --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ @@ -494,7 +497,7 @@ zipp==3.10.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.0 \ - --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ - --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/noxfile.py b/noxfile.py index 35f3976c..d752b54f 100644 --- a/noxfile.py +++ b/noxfile.py @@ -272,7 +272,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" @@ -298,7 +298,7 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" From 89b02c5126f41477d7cd178fe83a5239cf93ec0e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Dec 2022 12:47:21 -0500 Subject: [PATCH 033/111] build(deps): bump certifi from 2022.9.24 to 2022.12.7 [autoapprove] (#401) * build(deps): bump certifi from 2022.9.24 to 2022.12.7 in /synthtool/gcp/templates/python_library/.kokoro Source-Link: https://github.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 * trigger ci Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 3 ++- .kokoro/requirements.txt | 6 +++--- .pre-commit-config.yaml | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3f1ccc08..df2cfe5d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 + diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 9c1b9be3..05dc4672 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 46d23716..5405cc8f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 From ccaa40f1eca7d001cb58cd340189d521e93632ff Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 15 Dec 2022 11:48:32 -0500 Subject: [PATCH 034/111] fix(deps): Require google-api-core >=1.34.0, >=2.11.0 (#396) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update to gapic-generator-python 1.5.0 feat: add support for `google.cloud..__version__` PiperOrigin-RevId: 484665853 Source-Link: https://github.com/googleapis/googleapis/commit/8eb249a19db926c2fbc4ecf1dc09c0e521a88b22 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c8aa327b5f478865fc3fd91e3c2768e54e26ad44 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzhhYTMyN2I1ZjQ3ODg2NWZjM2ZkOTFlM2MyNzY4ZTU0ZTI2YWQ0NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update to gapic-generator-python 1.6.0 feat(python): Add typing to proto.Message based class attributes feat(python): Snippetgen handling of repeated enum field PiperOrigin-RevId: 487326846 Source-Link: https://github.com/googleapis/googleapis/commit/da380c77bb87ba0f752baf07605dd1db30e1f7e1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/61ef5762ee6731a0cbbfea22fd0eecee51ab1c8e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlZjU3NjJlZTY3MzFhMGNiYmZlYTIyZmQwZWVjZWU1MWFiMWM4ZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: new APIs added to reflect updates to the filestore service - Add ENTERPRISE Tier - Add snapshot APIs: RevertInstance, ListSnapshots, CreateSnapshot, DeleteSnapshot, UpdateSnapshot - Add multi-share APIs: ListShares, GetShare, CreateShare, DeleteShare, UpdateShare - Add ConnectMode to NetworkConfig (for Private Service Access support) - New status codes (SUSPENDED/SUSPENDING, REVERTING/RESUMING) - Add SuspensionReason (for KMS related suspension) - Add new fields to Instance information: max_capacity_gb, capacity_step_size_gb, 
max_share_count, capacity_gb, multi_share_enabled PiperOrigin-RevId: 487492758 Source-Link: https://github.com/googleapis/googleapis/commit/5be5981f50322cf0c7388595e0f31ac5d0693469 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab0e217f560cc2c1afc11441c2eab6b6950efd2b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWIwZTIxN2Y1NjBjYzJjMWFmYzExNDQxYzJlYWI2YjY5NTBlZmQyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.6.1 PiperOrigin-RevId: 488036204 Source-Link: https://github.com/googleapis/googleapis/commit/08f275f5c1c0d99056e1cb68376323414459ee19 Source-Link: https://github.com/googleapis/googleapis-gen/commit/555c0945e60649e38739ae64bc45719cdf72178f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU1YzA5NDVlNjA2NDllMzg3MzlhZTY0YmM0NTcxOWNkZjcyMTc4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore(python): fix warehouse-package-name for clouderrorreporting v1beta1 PiperOrigin-RevId: 491971000 Source-Link: https://github.com/googleapis/googleapis/commit/f0c519fb308a021f1a46eaf4428f49bb7c059edc Source-Link: https://github.com/googleapis/googleapis-gen/commit/7a1ce8ade63c592561aece1a734dc32e4cfcd36b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2ExY2U4YWRlNjNjNTkyNTYxYWVjZTFhNzM0ZGMzMmU0Y2ZjZDM2YiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Require google-api-core >=1.34.0, >=2.11.0 fix: Drop usage of pkg_resources fix: Fix timeout default values docs(samples): Snippetgen should call await on the operation coroutine before calling result PiperOrigin-RevId: 493260409 Source-Link: https://github.com/googleapis/googleapis/commit/fea43879f83a8d0dacc9353b3f75f8f46d37162f Source-Link: https://github.com/googleapis/googleapis-gen/commit/387b7344c7529ee44be84e613b19a820508c612b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add gapic_version.py * fix path in setup.py * fix build Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .coveragerc | 5 -- .github/release-please.yml | 1 + .release-please-manifest.json | 3 + docs/errorreporting_v1beta1/types.rst | 1 - google/cloud/error_reporting/__init__.py | 4 +- google/cloud/error_reporting/gapic_version.py | 16 ++++ .../cloud/errorreporting_v1beta1/__init__.py | 4 + .../errorreporting_v1beta1/gapic_version.py | 16 ++++ google/cloud/errorreporting_v1beta1/py.typed | 2 +- .../error_group_service/async_client.py | 46 +++++----- .../services/error_group_service/client.py | 48 +++++----- .../error_group_service/transports/base.py | 16 ++-- .../error_group_service/transports/grpc.py | 20 ++--- .../transports/grpc_asyncio.py | 16 ++-- .../error_stats_service/async_client.py | 60 +++++++------ .../services/error_stats_service/client.py | 60 +++++++------ .../error_stats_service/transports/base.py | 16 ++-- .../error_stats_service/transports/grpc.py | 20 ++--- .../transports/grpc_asyncio.py | 16 ++-- .../report_errors_service/async_client.py | 42 +++++---- .../services/report_errors_service/client.py | 46 ++++++---- .../report_errors_service/transports/base.py | 16 ++-- 
.../report_errors_service/transports/grpc.py | 20 ++--- .../transports/grpc_asyncio.py | 16 ++-- .../errorreporting_v1beta1/types/common.py | 52 +++++------ .../types/error_group_service.py | 6 +- .../types/error_stats_service.py | 90 ++++++++++--------- .../types/report_errors_service.py | 14 +-- owlbot.py | 70 +++++++++++---- release-please-config.json | 24 +++++ ...ted_error_group_service_get_group_async.py | 2 +- ...ated_error_group_service_get_group_sync.py | 2 +- ..._error_group_service_update_group_async.py | 2 +- ...d_error_group_service_update_group_sync.py | 2 +- ...error_stats_service_delete_events_async.py | 2 +- ..._error_stats_service_delete_events_sync.py | 2 +- ...d_error_stats_service_list_events_async.py | 2 +- ...ed_error_stats_service_list_events_sync.py | 2 +- ...or_stats_service_list_group_stats_async.py | 2 +- ...ror_stats_service_list_group_stats_sync.py | 2 +- ...errors_service_report_error_event_async.py | 2 +- ..._errors_service_report_error_event_sync.py | 2 +- ...devtools.clouderrorreporting.v1beta1.json} | 3 +- setup.py | 50 +++++------ testing/constraints-3.10.txt | 6 ++ testing/constraints-3.11.txt | 6 ++ testing/constraints-3.7.txt | 11 ++- testing/constraints-3.8.txt | 6 ++ testing/constraints-3.9.txt | 6 ++ 49 files changed, 519 insertions(+), 359 deletions(-) create mode 100644 .release-please-manifest.json create mode 100644 google/cloud/error_reporting/gapic_version.py create mode 100644 google/cloud/errorreporting_v1beta1/gapic_version.py create mode 100644 release-please-config.json rename samples/generated_samples/{snippet_metadata_errorreporting_v1beta1.json => snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json} (99%) diff --git a/.coveragerc b/.coveragerc index f72fce1e..67733e5c 100644 --- a/.coveragerc +++ b/.coveragerc @@ -10,8 +10,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/.github/release-please.yml b/.github/release-please.yml index 6def37a8..e9a4f008 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,5 +1,6 @@ releaseType: python handleGHRelease: true +manifest: true # NOTE: this section is generated by synthtool.languages.python # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py branches: diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 00000000..2a8d04b3 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "1.6.3" +} diff --git a/docs/errorreporting_v1beta1/types.rst b/docs/errorreporting_v1beta1/types.rst index 179256c7..08851dbe 100644 --- a/docs/errorreporting_v1beta1/types.rst +++ b/docs/errorreporting_v1beta1/types.rst @@ -3,5 +3,4 @@ Types for Google Cloud Errorreporting v1beta1 API .. 
automodule:: google.cloud.errorreporting_v1beta1.types :members: - :undoc-members: :show-inheritance: diff --git a/google/cloud/error_reporting/__init__.py b/google/cloud/error_reporting/__init__.py index 8cbe3963..1b729821 100644 --- a/google/cloud/error_reporting/__init__.py +++ b/google/cloud/error_reporting/__init__.py @@ -15,9 +15,9 @@ """Client library for Error Reporting""" -from pkg_resources import get_distribution +from google.cloud.error_reporting import gapic_version as package_version -__version__ = get_distribution("google-cloud-error-reporting").version +__version__ = package_version.__version__ from google.cloud.error_reporting.client import Client from google.cloud.error_reporting.client import HTTPContext diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py new file mode 100644 index 00000000..a7807d3a --- /dev/null +++ b/google/cloud/error_reporting/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.6.3" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/__init__.py b/google/cloud/errorreporting_v1beta1/__init__.py index 04baaaa4..32bd5db9 100644 --- a/google/cloud/errorreporting_v1beta1/__init__.py +++ b/google/cloud/errorreporting_v1beta1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.error_group_service import ErrorGroupServiceClient from .services.error_group_service import ErrorGroupServiceAsyncClient diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py new file mode 100644 index 00000000..a7807d3a --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "1.6.3" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/py.typed b/google/cloud/errorreporting_v1beta1/py.typed index 20bf6ac6..01870137 100644 --- a/google/cloud/errorreporting_v1beta1/py.typed +++ b/google/cloud/errorreporting_v1beta1/py.typed @@ -1,2 +1,2 @@ # Marker file for PEP 561. -# The google-cloud-errorreporting package uses inline types. 
+# The google-cloud-error-reporting package uses inline types. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index 8af3cd82..ac13ab34 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -160,9 +171,9 @@ def transport(self) -> ErrorGroupServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ErrorGroupServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the error group service client. @@ -206,11 +217,11 @@ def __init__( async def get_group( self, - request: Union[error_group_service.GetGroupRequest, dict] = None, + request: Optional[Union[error_group_service.GetGroupRequest, dict]] = None, *, - group_name: str = None, + group_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.ErrorGroup: r"""Get the specified group. @@ -242,7 +253,7 @@ async def sample_get_group(): print(response) Args: - request (Union[google.cloud.errorreporting_v1beta1.types.GetGroupRequest, dict]): + request (Optional[Union[google.cloud.errorreporting_v1beta1.types.GetGroupRequest, dict]]): The request object. A request to return an individual group. group_name (:class:`str`): @@ -314,11 +325,11 @@ async def sample_get_group(): async def update_group( self, - request: Union[error_group_service.UpdateGroupRequest, dict] = None, + request: Optional[Union[error_group_service.UpdateGroupRequest, dict]] = None, *, - group: common.ErrorGroup = None, + group: Optional[common.ErrorGroup] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.ErrorGroup: r"""Replace the data for the specified group. @@ -350,7 +361,7 @@ async def sample_update_group(): print(response) Args: - request (Union[google.cloud.errorreporting_v1beta1.types.UpdateGroupRequest, dict]): + request (Optional[Union[google.cloud.errorreporting_v1beta1.types.UpdateGroupRequest, dict]]): The request object. A request to replace the existing data for the given group. 
group (:class:`google.cloud.errorreporting_v1beta1.types.ErrorGroup`): @@ -423,14 +434,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ErrorGroupServiceAsyncClient",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index 86ae1120..b2224989 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -57,7 +69,7 @@ class ErrorGroupServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ErrorGroupServiceTransport]: """Returns an appropriate transport class. @@ -327,8 +339,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ErrorGroupServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, ErrorGroupServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the error group service client. @@ -342,7 +354,7 @@ def __init__( transport (Union[str, ErrorGroupServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -372,6 +384,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -424,11 +437,11 @@ def __init__( def get_group( self, - request: Union[error_group_service.GetGroupRequest, dict] = None, + request: Optional[Union[error_group_service.GetGroupRequest, dict]] = None, *, - group_name: str = None, + group_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.ErrorGroup: r"""Get the specified group. @@ -532,11 +545,11 @@ def sample_get_group(): def update_group( self, - request: Union[error_group_service.UpdateGroupRequest, dict] = None, + request: Optional[Union[error_group_service.UpdateGroupRequest, dict]] = None, *, - group: common.ErrorGroup = None, + group: Optional[common.ErrorGroup] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> common.ErrorGroup: r"""Replace the data for the specified group. @@ -648,14 +661,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ErrorGroupServiceClient",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py index d2ef9e3f..665ee116 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -28,14 +29,9 @@ from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_group_service -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class ErrorGroupServiceTransport(abc.ABC): @@ -49,7 +45,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py 
b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index a4595425..4145ab7b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -48,14 +48,14 @@ def __init__( self, *, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -182,8 +182,8 @@ def __init__( def create_channel( cls, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py index be98ee03..69497298 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py @@ -50,7 +50,7 @@ class ErrorGroupServiceGrpcAsyncIOTransport(ErrorGroupServiceTransport): def create_channel( cls, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -93,15 +93,15 @@ def __init__( self, *, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = 
DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 88b31900..84c42d3e 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -163,9 +174,9 @@ def transport(self) -> ErrorStatsServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ErrorStatsServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the error stats service client. @@ -209,12 +220,14 @@ def __init__( async def list_group_stats( self, - request: Union[error_stats_service.ListGroupStatsRequest, dict] = None, + request: Optional[ + Union[error_stats_service.ListGroupStatsRequest, dict] + ] = None, *, - project_name: str = None, - time_range: error_stats_service.QueryTimeRange = None, + project_name: Optional[str] = None, + time_range: Optional[error_stats_service.QueryTimeRange] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListGroupStatsAsyncPager: r"""Lists the specified groups. @@ -247,7 +260,7 @@ async def sample_list_group_stats(): print(response) Args: - request (Union[google.cloud.errorreporting_v1beta1.types.ListGroupStatsRequest, dict]): + request (Optional[Union[google.cloud.errorreporting_v1beta1.types.ListGroupStatsRequest, dict]]): The request object. Specifies a set of `ErrorGroupStats` to return. project_name (:class:`str`): @@ -348,12 +361,12 @@ async def sample_list_group_stats(): async def list_events( self, - request: Union[error_stats_service.ListEventsRequest, dict] = None, + request: Optional[Union[error_stats_service.ListEventsRequest, dict]] = None, *, - project_name: str = None, - group_id: str = None, + project_name: Optional[str] = None, + group_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEventsAsyncPager: r"""Lists the specified events. @@ -387,7 +400,7 @@ async def sample_list_events(): print(response) Args: - request (Union[google.cloud.errorreporting_v1beta1.types.ListEventsRequest, dict]): + request (Optional[Union[google.cloud.errorreporting_v1beta1.types.ListEventsRequest, dict]]): The request object. Specifies a set of error events to return. 
project_name (:class:`str`): @@ -480,11 +493,11 @@ async def sample_list_events(): async def delete_events( self, - request: Union[error_stats_service.DeleteEventsRequest, dict] = None, + request: Optional[Union[error_stats_service.DeleteEventsRequest, dict]] = None, *, - project_name: str = None, + project_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> error_stats_service.DeleteEventsResponse: r"""Deletes all error events of a given project. @@ -516,7 +529,7 @@ async def sample_delete_events(): print(response) Args: - request (Union[google.cloud.errorreporting_v1beta1.types.DeleteEventsRequest, dict]): + request (Optional[Union[google.cloud.errorreporting_v1beta1.types.DeleteEventsRequest, dict]]): The request object. Deletes all events in the project. project_name (:class:`str`): Required. The resource name of the Google Cloud Platform @@ -592,14 +605,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ErrorStatsServiceAsyncClient",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 77690989..39b6b5fd 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -58,7 +70,7 @@ class ErrorStatsServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ErrorStatsServiceTransport]: """Returns an appropriate transport class. @@ -330,8 +342,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ErrorStatsServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, ErrorStatsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the error stats service client. @@ -345,7 +357,7 @@ def __init__( transport (Union[str, ErrorStatsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -375,6 +387,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -427,12 +440,14 @@ def __init__( def list_group_stats( self, - request: Union[error_stats_service.ListGroupStatsRequest, dict] = None, + request: Optional[ + Union[error_stats_service.ListGroupStatsRequest, dict] + ] = None, *, - project_name: str = None, - time_range: error_stats_service.QueryTimeRange = None, + project_name: Optional[str] = None, + time_range: Optional[error_stats_service.QueryTimeRange] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListGroupStatsPager: r"""Lists the specified groups. @@ -566,12 +581,12 @@ def sample_list_group_stats(): def list_events( self, - request: Union[error_stats_service.ListEventsRequest, dict] = None, + request: Optional[Union[error_stats_service.ListEventsRequest, dict]] = None, *, - project_name: str = None, - group_id: str = None, + project_name: Optional[str] = None, + group_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListEventsPager: r"""Lists the specified events. @@ -698,11 +713,11 @@ def sample_list_events(): def delete_events( self, - request: Union[error_stats_service.DeleteEventsRequest, dict] = None, + request: Optional[Union[error_stats_service.DeleteEventsRequest, dict]] = None, *, - project_name: str = None, + project_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> error_stats_service.DeleteEventsResponse: r"""Deletes all error events of a given project. 
@@ -817,14 +832,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ErrorStatsServiceClient",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py index d7970fc8..a2546b27 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -27,14 +28,9 @@ from google.cloud.errorreporting_v1beta1.types import error_stats_service -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class ErrorStatsServiceTransport(abc.ABC): @@ -48,7 +44,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index c2dbd844..21e0d0c2 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -48,14 +48,14 @@ def __init__( self, *, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -182,8 +182,8 @@ def __init__( def create_channel( cls, host: str = "clouderrorreporting.googleapis.com", - credentials: 
ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py index 9fd8c8fd..b51a2ccb 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py @@ -50,7 +50,7 @@ class ErrorStatsServiceGrpcAsyncIOTransport(ErrorStatsServiceTransport): def create_channel( cls, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -93,15 +93,15 @@ def __init__( self, *, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index ae912ecd..abd5207b 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -16,8 +16,19 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -156,9 +167,9 @@ def transport(self) -> ReportErrorsServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, ReportErrorsServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the report errors service client. 
@@ -202,12 +213,14 @@ def __init__( async def report_error_event( self, - request: Union[report_errors_service.ReportErrorEventRequest, dict] = None, + request: Optional[ + Union[report_errors_service.ReportErrorEventRequest, dict] + ] = None, *, - project_name: str = None, - event: report_errors_service.ReportedErrorEvent = None, + project_name: Optional[str] = None, + event: Optional[report_errors_service.ReportedErrorEvent] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> report_errors_service.ReportErrorEventResponse: r"""Report an individual error event and record the event to a log. @@ -258,7 +271,7 @@ async def sample_report_error_event(): print(response) Args: - request (Union[google.cloud.errorreporting_v1beta1.types.ReportErrorEventRequest, dict]): + request (Optional[Union[google.cloud.errorreporting_v1beta1.types.ReportErrorEventRequest, dict]]): The request object. A request for reporting an individual error event. project_name (:class:`str`): @@ -345,14 +358,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ReportErrorsServiceAsyncClient",) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 9d6f734a..984a993e 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -16,8 +16,20 @@ from collections import OrderedDict import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -56,7 +68,7 @@ class ReportErrorsServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[ReportErrorsServiceTransport]: """Returns an appropriate transport class. @@ -309,8 +321,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ReportErrorsServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, ReportErrorsServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the report errors service client. @@ -324,7 +336,7 @@ def __init__( transport (Union[str, ReportErrorsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -354,6 +366,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -406,12 +419,14 @@ def __init__( def report_error_event( self, - request: Union[report_errors_service.ReportErrorEventRequest, dict] = None, + request: Optional[ + Union[report_errors_service.ReportErrorEventRequest, dict] + ] = None, *, - project_name: str = None, - event: report_errors_service.ReportedErrorEvent = None, + project_name: Optional[str] = None, + event: Optional[report_errors_service.ReportedErrorEvent] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> report_errors_service.ReportErrorEventResponse: r"""Report an individual error event and record the event to a log. @@ -556,14 +571,9 @@ def __exit__(self, type, value, traceback): self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) __all__ = ("ReportErrorsServiceClient",) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py index e2ec3343..77dc674a 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.cloud.errorreporting_v1beta1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -27,14 +28,9 @@ from google.cloud.errorreporting_v1beta1.types import report_errors_service -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-errorreporting", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) class ReportErrorsServiceTransport(abc.ABC): @@ -48,7 +44,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py 
b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index 2c08420c..fb212ee3 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -47,14 +47,14 @@ def __init__( self, *, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -181,8 +181,8 @@ def __init__( def create_channel( cls, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index def71459..44636c83 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -49,7 +49,7 @@ class ReportErrorsServiceGrpcAsyncIOTransport(ReportErrorsServiceTransport): def create_channel( cls, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -92,15 +92,15 @@ def __init__( self, *, host: str = "clouderrorreporting.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: 
gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index 1fbc87c6..e84ca351 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -55,7 +57,7 @@ class ErrorGroup(proto.Message): the same kind of error occurs in different service contexts, it will receive the same group ID. - tracking_issues (Sequence[google.cloud.errorreporting_v1beta1.types.TrackingIssue]): + tracking_issues (MutableSequence[google.cloud.errorreporting_v1beta1.types.TrackingIssue]): Associated tracking issues. resolution_status (google.cloud.errorreporting_v1beta1.types.ResolutionStatus): Error group's resolution status. @@ -63,20 +65,20 @@ class ErrorGroup(proto.Message): interpreted as OPEN """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - group_id = proto.Field( + group_id: str = proto.Field( proto.STRING, number=2, ) - tracking_issues = proto.RepeatedField( + tracking_issues: MutableSequence["TrackingIssue"] = proto.RepeatedField( proto.MESSAGE, number=3, message="TrackingIssue", ) - resolution_status = proto.Field( + resolution_status: "ResolutionStatus" = proto.Field( proto.ENUM, number=5, enum="ResolutionStatus", @@ -94,7 +96,7 @@ class TrackingIssue(proto.Message): ``https://github.com/user/project/issues/4`` """ - url = proto.Field( + url: str = proto.Field( proto.STRING, number=1, ) @@ -120,21 +122,21 @@ class ErrorEvent(proto.Message): occurred. """ - event_time = proto.Field( + event_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - service_context = proto.Field( + service_context: "ServiceContext" = proto.Field( proto.MESSAGE, number=2, message="ServiceContext", ) - message = proto.Field( + message: str = proto.Field( proto.STRING, number=3, ) - context = proto.Field( + context: "ErrorContext" = proto.Field( proto.MESSAGE, number=5, message="ErrorContext", @@ -171,15 +173,15 @@ class ServiceContext(proto.Message): and must not be set when reporting errors. """ - service = proto.Field( + service: str = proto.Field( proto.STRING, number=2, ) - version = proto.Field( + version: str = proto.Field( proto.STRING, number=3, ) - resource_type = proto.Field( + resource_type: str = proto.Field( proto.STRING, number=4, ) @@ -213,16 +215,16 @@ class ErrorContext(proto.Message): place where it was caught. """ - http_request = proto.Field( + http_request: "HttpRequestContext" = proto.Field( proto.MESSAGE, number=1, message="HttpRequestContext", ) - user = proto.Field( + user: str = proto.Field( proto.STRING, number=2, ) - report_location = proto.Field( + report_location: "SourceLocation" = proto.Field( proto.MESSAGE, number=3, message="SourceLocation", @@ -258,27 +260,27 @@ class HttpRequestContext(proto.Message): report. 
""" - method = proto.Field( + method: str = proto.Field( proto.STRING, number=1, ) - url = proto.Field( + url: str = proto.Field( proto.STRING, number=2, ) - user_agent = proto.Field( + user_agent: str = proto.Field( proto.STRING, number=3, ) - referrer = proto.Field( + referrer: str = proto.Field( proto.STRING, number=4, ) - response_status_code = proto.Field( + response_status_code: int = proto.Field( proto.INT32, number=5, ) - remote_ip = proto.Field( + remote_ip: str = proto.Field( proto.STRING, number=6, ) @@ -305,15 +307,15 @@ class SourceLocation(proto.Message): example, ``my.package.MyClass.method`` in case of Java. """ - file_path = proto.Field( + file_path: str = proto.Field( proto.STRING, number=1, ) - line_number = proto.Field( + line_number: int = proto.Field( proto.INT32, number=2, ) - function_name = proto.Field( + function_name: str = proto.Field( proto.STRING, number=4, ) diff --git a/google/cloud/errorreporting_v1beta1/types/error_group_service.py b/google/cloud/errorreporting_v1beta1/types/error_group_service.py index f40ad9a9..a3afd595 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_group_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_group_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.errorreporting_v1beta1.types import common @@ -40,7 +42,7 @@ class GetGroupRequest(proto.Message): Example: ``projects/my-project-123/groups/my-group`` """ - group_name = proto.Field( + group_name: str = proto.Field( proto.STRING, number=1, ) @@ -55,7 +57,7 @@ class UpdateGroupRequest(proto.Message): resource on the server. """ - group = proto.Field( + group: common.ErrorGroup = proto.Field( proto.MESSAGE, number=1, message=common.ErrorGroup, diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index 15615596..e28c7dde 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.errorreporting_v1beta1.types import common @@ -69,7 +71,7 @@ class ListGroupStatsRequest(proto.Message): Console `__. Examples: ``projects/my-project-123``, ``projects/5551234``. - group_id (Sequence[str]): + group_id (MutableSequence[str]): Optional. List all ErrorGroupStats with these IDs. service_filter (google.cloud.errorreporting_v1beta1.types.ServiceContextFilter): @@ -108,49 +110,49 @@ class ListGroupStatsRequest(proto.Message): with the identical query parameters as the first request. 
""" - project_name = proto.Field( + project_name: str = proto.Field( proto.STRING, number=1, ) - group_id = proto.RepeatedField( + group_id: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - service_filter = proto.Field( + service_filter: "ServiceContextFilter" = proto.Field( proto.MESSAGE, number=3, message="ServiceContextFilter", ) - time_range = proto.Field( + time_range: "QueryTimeRange" = proto.Field( proto.MESSAGE, number=5, message="QueryTimeRange", ) - timed_count_duration = proto.Field( + timed_count_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=6, message=duration_pb2.Duration, ) - alignment = proto.Field( + alignment: "TimedCountAlignment" = proto.Field( proto.ENUM, number=7, enum="TimedCountAlignment", ) - alignment_time = proto.Field( + alignment_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=8, message=timestamp_pb2.Timestamp, ) - order = proto.Field( + order: "ErrorGroupOrder" = proto.Field( proto.ENUM, number=9, enum="ErrorGroupOrder", ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=11, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=12, ) @@ -160,7 +162,7 @@ class ListGroupStatsResponse(proto.Message): r"""Contains a set of requested error group stats. Attributes: - error_group_stats (Sequence[google.cloud.errorreporting_v1beta1.types.ErrorGroupStats]): + error_group_stats (MutableSequence[google.cloud.errorreporting_v1beta1.types.ErrorGroupStats]): The error group stats which match the given request. next_page_token (str): @@ -181,16 +183,16 @@ class ListGroupStatsResponse(proto.Message): def raw_page(self): return self - error_group_stats = proto.RepeatedField( + error_group_stats: MutableSequence["ErrorGroupStats"] = proto.RepeatedField( proto.MESSAGE, number=1, message="ErrorGroupStats", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - time_range_begin = proto.Field( + time_range_begin: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, @@ -220,7 +222,7 @@ class ErrorGroupStats(proto.Message): provided in the error report. If more users are implicitly affected, such as due to a crash of the whole service, this is not reflected here. - timed_counts (Sequence[google.cloud.errorreporting_v1beta1.types.TimedCount]): + timed_counts (MutableSequence[google.cloud.errorreporting_v1beta1.types.TimedCount]): Approximate number of occurrences over time. Timed counts returned by ListGroups are guaranteed to be: @@ -235,7 +237,7 @@ class ErrorGroupStats(proto.Message): Approximate last occurrence that was ever seen for this group and which matches the given filter criteria, ignoring the time_range that was specified in the request. - affected_services (Sequence[google.cloud.errorreporting_v1beta1.types.ServiceContext]): + affected_services (MutableSequence[google.cloud.errorreporting_v1beta1.types.ServiceContext]): Service contexts with a non-zero error count for the given filter criteria. This list can be truncated if multiple services are affected. Refer to ``num_affected_services`` @@ -254,44 +256,44 @@ class ErrorGroupStats(proto.Message): characteristics of the group as a whole. 
""" - group = proto.Field( + group: common.ErrorGroup = proto.Field( proto.MESSAGE, number=1, message=common.ErrorGroup, ) - count = proto.Field( + count: int = proto.Field( proto.INT64, number=2, ) - affected_users_count = proto.Field( + affected_users_count: int = proto.Field( proto.INT64, number=3, ) - timed_counts = proto.RepeatedField( + timed_counts: MutableSequence["TimedCount"] = proto.RepeatedField( proto.MESSAGE, number=4, message="TimedCount", ) - first_seen_time = proto.Field( + first_seen_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) - last_seen_time = proto.Field( + last_seen_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp, ) - affected_services = proto.RepeatedField( + affected_services: MutableSequence[common.ServiceContext] = proto.RepeatedField( proto.MESSAGE, number=7, message=common.ServiceContext, ) - num_affected_services = proto.Field( + num_affected_services: int = proto.Field( proto.INT32, number=8, ) - representative = proto.Field( + representative: common.ErrorEvent = proto.Field( proto.MESSAGE, number=9, message=common.ErrorEvent, @@ -314,16 +316,16 @@ class TimedCount(proto.Message): End of the time period to which ``count`` refers (excluded). """ - count = proto.Field( + count: int = proto.Field( proto.INT64, number=1, ) - start_time = proto.Field( + start_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp, ) - end_time = proto.Field( + end_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, @@ -362,29 +364,29 @@ class ListEventsRequest(proto.Message): response. """ - project_name = proto.Field( + project_name: str = proto.Field( proto.STRING, number=1, ) - group_id = proto.Field( + group_id: str = proto.Field( proto.STRING, number=2, ) - service_filter = proto.Field( + service_filter: "ServiceContextFilter" = proto.Field( proto.MESSAGE, number=3, message="ServiceContextFilter", ) - time_range = proto.Field( + time_range: "QueryTimeRange" = proto.Field( proto.MESSAGE, number=4, message="QueryTimeRange", ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=6, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=7, ) @@ -394,7 +396,7 @@ class ListEventsResponse(proto.Message): r"""Contains a set of requested error events. Attributes: - error_events (Sequence[google.cloud.errorreporting_v1beta1.types.ErrorEvent]): + error_events (MutableSequence[google.cloud.errorreporting_v1beta1.types.ErrorEvent]): The error events which match the given request. next_page_token (str): @@ -411,16 +413,16 @@ class ListEventsResponse(proto.Message): def raw_page(self): return self - error_events = proto.RepeatedField( + error_events: MutableSequence[common.ErrorEvent] = proto.RepeatedField( proto.MESSAGE, number=1, message=common.ErrorEvent, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) - time_range_begin = proto.Field( + time_range_begin: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, @@ -446,7 +448,7 @@ class Period(proto.Enum): PERIOD_1_WEEK = 4 PERIOD_30_DAYS = 5 - period = proto.Field( + period: Period = proto.Field( proto.ENUM, number=1, enum=Period, @@ -471,15 +473,15 @@ class ServiceContextFilter(proto.Message): ```ServiceContext.resource_type`` `__. 
""" - service = proto.Field( + service: str = proto.Field( proto.STRING, number=2, ) - version = proto.Field( + version: str = proto.Field( proto.STRING, number=3, ) - resource_type = proto.Field( + resource_type: str = proto.Field( proto.STRING, number=4, ) @@ -498,7 +500,7 @@ class DeleteEventsRequest(proto.Message): Example: ``projects/my-project-123``. """ - project_name = proto.Field( + project_name: str = proto.Field( proto.STRING, number=1, ) diff --git a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py index 22b901a4..e154c5fe 100644 --- a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py +++ b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.cloud.errorreporting_v1beta1.types import common @@ -44,11 +46,11 @@ class ReportErrorEventRequest(proto.Message): Required. The error event to be reported. """ - project_name = proto.Field( + project_name: str = proto.Field( proto.STRING, number=1, ) - event = proto.Field( + event: "ReportedErrorEvent" = proto.Field( proto.MESSAGE, number=2, message="ReportedErrorEvent", @@ -106,21 +108,21 @@ class ReportedErrorEvent(proto.Message): which the error occurred. """ - event_time = proto.Field( + event_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp, ) - service_context = proto.Field( + service_context: common.ServiceContext = proto.Field( proto.MESSAGE, number=2, message=common.ServiceContext, ) - message = proto.Field( + message: str = proto.Field( proto.STRING, number=3, ) - context = proto.Field( + context: common.ErrorContext = proto.Field( proto.MESSAGE, number=4, message=common.ErrorContext, diff --git a/owlbot.py b/owlbot.py index b192af9a..c96feb0a 100644 --- a/owlbot.py +++ b/owlbot.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,40 +12,72 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""This script is used to synthesize generated parts of this library.""" +import json +import os +from pathlib import Path +import shutil + import synthtool as s -from synthtool import gcp +import synthtool.gcp as gcp from synthtool.languages import python -import os -common = gcp.CommonTemplates() +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +clean_up_generated_samples = True -default_version = "v1beta1" +# Load the default version defined in .repo-metadata.json. 
+default_version = json.load(open(".repo-metadata.json", "rt")).get("default_version") for library in s.get_staging_dirs(default_version): - s.move(library, excludes=["nox.py", "setup.py", "README.rst", "docs/index.rst", "google/cloud/errorreporting/"]) + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + # work around issue where google.cloud.errorreporting is not present + s.replace(library / "google/cloud/errorreporting_v1beta1/__init__.py", + "from google.cloud.errorreporting", + "from google.cloud.errorreporting_v1beta1" + ) + + s.move( + [library], + excludes=[ + "**/gapic_version.py", + "docs/index.rst", + "google/cloud/errorreporting/", + "setup.py", + "testing/constraints-3.7.txt", + ], + ) s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library( - samples=True, # set to True only if there are samples - microgenerator=True, + +templated_files = gcp.CommonTemplates().py_library( cov_level=100, + microgenerator=True, + versions=gcp.common.detect_versions(path="./google", default_first=True), +) +s.move( + templated_files, + excludes=[ + ".coveragerc", + ".github/release-please.yml", + ".github/auto-label.yaml", + "docs/index.rst", + "testing/constraints-3.7.txt", + ], ) -s.move(templated_files, excludes=[".coveragerc", ".github/auto-label.yaml"]) # microgenerator has a good .coveragerc file -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- python.py_samples(skip_readmes=True) -python.configure_previous_major_version_branches() - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) - +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) # -------------------------------------------------------------------------- # Modify test configs @@ -61,5 +93,5 @@ s.move( ".kokoro/common_env_vars.cfg", file_path, - merge=lambda src, dst, _, : f"{dst}\n{src}", + merge=lambda src, dst, _,: f"{dst}\n{src}", ) diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 00000000..27b697b9 --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,24 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/cloud/error_reporting/gapic_version.py", + "google/cloud/errorreporting_v1beta1/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py index 7b49e8b8..b4a207ad 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py +++ 
b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_GetGroup_async] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py index 28d64428..e4f473de 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_GetGroup_sync] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py index f0a68ab3..6ee4a823 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_UpdateGroup_async] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py index b021265a..4f8f6bca 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorGroupService_UpdateGroup_sync] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py index e30df345..bc8eb7be 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_DeleteEvents_async] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py index 0b16d477..c03edfb3 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_DeleteEvents_sync] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py index 9d25b821..0cd82da6 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListEvents_async] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py index 19cbf488..dbed18fa 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListEvents_sync] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py index 25ae2ff0..c78b943f 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListGroupStats_async] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py index 0d779b46..a90ceaef 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ErrorStatsService_ListGroupStats_sync] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py index f836815d..400a567a 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ReportErrorsService_ReportErrorEvent_async] diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py index 575f57aa..2260dd1b 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py @@ -20,7 +20,7 @@ # It may require modifications to work in your environment. 
# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-errorreporting +# python3 -m pip install google-cloud-error-reporting # [START clouderrorreporting_v1beta1_generated_ReportErrorsService_ReportErrorEvent_sync] diff --git a/samples/generated_samples/snippet_metadata_errorreporting_v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json similarity index 99% rename from samples/generated_samples/snippet_metadata_errorreporting_v1beta1.json rename to samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index 4d088480..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_errorreporting_v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -7,7 +7,8 @@ } ], "language": "PYTHON", - "name": "google-cloud-errorreporting" + "name": "google-cloud-error-reporting", + "version": "0.1.0" }, "snippets": [ { diff --git a/setup.py b/setup.py index 23a8f2ef..56ed1ec5 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,33 +12,38 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# import io import os -import setuptools +import setuptools # type: ignore - -# Package metadata. +package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-error-reporting" -description = "Error Reporting API client library" -version = "1.6.3" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 4 - Beta" + + +description = "Google Cloud Error Reporting API client library" + +version = {} +with open( + os.path.join(package_root, "google/cloud/error_reporting/gapic_version.py") +) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + dependencies = [ "google-cloud-logging>=1.14.0, <4.0.0dev", - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] -extras = {} - - -# Setup boilerplate below this line. +url = "https://github.com/googleapis/python-error-reporting" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -45,20 +51,16 @@ with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. packages = [ package for package in setuptools.PEP420PackageFinder.find() if package.startswith("google") ] -# Determine which namespaces are needed. 
namespaces = ["google"] if "google.cloud" in packages: namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -67,7 +69,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/googleapis/python-error-reporting", + url=url, classifiers=[ release_status, "Intended Audience :: Developers", @@ -83,11 +85,9 @@ ], platforms="Posix; MacOS X; Windows", packages=packages, + python_requires=">=3.7", namespace_packages=namespaces, install_requires=dependencies, - extras_require=extras, - python_requires=">=3.7", - scripts=["scripts/fixup_errorreporting_v1beta1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index e69de29b..ed7f9aed 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index e69de29b..ed7f9aed 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index c4e51385..1b92bfe8 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -1,11 +1,10 @@ # This constraints file is used to check that lower bounds # are correct in setup.py -# List *all* library dependencies and extras in this file. +# List all library dependencies and extras in this file. # Pin the version to the lower bound. -# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-cloud-logging==1.14.0 -google-api-core==1.32.0 +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 +google-cloud-logging==1.14.0 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index e69de29b..ed7f9aed 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index e69de29b..ed7f9aed 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf From b1fbd7c6f9d8aae6d58fe9ac85e206a6cfb81b65 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 15 Dec 2022 14:08:50 -0500 Subject: [PATCH 035/111] chore(main): release 1.7.0 (#403) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 21 +++++++++++++++++++ google/cloud/error_reporting/gapic_version.py | 2 +- .../errorreporting_v1beta1/gapic_version.py | 2 +- ....devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 25 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 2a8d04b3..7588679c 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.6.3" + ".": "1.7.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 945f6fb4..d23ea5cf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.7.0](https://github.com/googleapis/python-error-reporting/compare/v1.6.3...v1.7.0) (2022-12-15) + + +### Features + +* Add typing to proto.Message based class attributes ([ccaa40f](https://github.com/googleapis/python-error-reporting/commit/ccaa40f1eca7d001cb58cd340189d521e93632ff)) + + +### Bug Fixes + +* Add dict typing for client_options ([ccaa40f](https://github.com/googleapis/python-error-reporting/commit/ccaa40f1eca7d001cb58cd340189d521e93632ff)) +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([ccaa40f](https://github.com/googleapis/python-error-reporting/commit/ccaa40f1eca7d001cb58cd340189d521e93632ff)) +* Drop usage of pkg_resources ([ccaa40f](https://github.com/googleapis/python-error-reporting/commit/ccaa40f1eca7d001cb58cd340189d521e93632ff)) +* Fix timeout default values ([ccaa40f](https://github.com/googleapis/python-error-reporting/commit/ccaa40f1eca7d001cb58cd340189d521e93632ff)) + + +### Documentation + +* **samples:** Snippetgen handling of repeated enum field ([ccaa40f](https://github.com/googleapis/python-error-reporting/commit/ccaa40f1eca7d001cb58cd340189d521e93632ff)) +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([ccaa40f](https://github.com/googleapis/python-error-reporting/commit/ccaa40f1eca7d001cb58cd340189d521e93632ff)) + ## [1.6.3](https://github.com/googleapis/python-error-reporting/compare/v1.6.2...v1.6.3) (2022-10-07) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index a7807d3a..f033c615 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.6.3" # {x-release-please-version} +__version__ = "1.7.0" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index a7807d3a..f033c615 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.6.3" # {x-release-please-version} +__version__ = "1.7.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..b365b419 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.7.0" }, "snippets": [ { From 13ecd816f79db680d1d885e571c454a5565b1e60 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 08:21:55 -0500 Subject: [PATCH 036/111] chore(python): add support for python 3.11 (#404) Source-Link: https://github.com/googleapis/synthtool/commit/7197a001ffb6d8ce7b0b9b11c280f0c536c1033a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 5 +- .github/workflows/unittest.yml | 2 +- .kokoro/samples/python3.11/common.cfg | 59 ++++++++++++++++++++ .kokoro/samples/python3.11/continuous.cfg | 6 ++ .kokoro/samples/python3.11/periodic-head.cfg | 11 ++++ .kokoro/samples/python3.11/periodic.cfg | 6 ++ .kokoro/samples/python3.11/presubmit.cfg | 6 ++ CONTRIBUTING.rst | 6 +- noxfile.py | 2 +- 9 files changed, 96 insertions(+), 7 deletions(-) create mode 100644 .kokoro/samples/python3.11/common.cfg create mode 100644 .kokoro/samples/python3.11/continuous.cfg create mode 100644 .kokoro/samples/python3.11/periodic-head.cfg create mode 100644 .kokoro/samples/python3.11/periodic.cfg create mode 100644 .kokoro/samples/python3.11/presubmit.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index df2cfe5d..889f77df 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 - + digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 23000c05..8057a769 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 00000000..d83d3eab --- /dev/null +++ b/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,59 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-error-reporting/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.11/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.11/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 00000000..0ab001ca --- /dev/null +++ b/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-error-reporting/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 00000000..71cd1e59 --- /dev/null +++ b/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.11/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 795ac991..f4f9bf83 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.10 -- -k + $ nox -s unit-3.11 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ +- `Python 3.11`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. 
_Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/noxfile.py b/noxfile.py index d752b54f..d1f24967 100644 --- a/noxfile.py +++ b/noxfile.py @@ -31,7 +31,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", From d5e0c4cf0cb40aab80146cad16141217b1998b30 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Jan 2023 23:05:33 -0500 Subject: [PATCH 037/111] feat: Add support for python 3.11 (#405) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for python 3.11 chore: Update gapic-generator-python to v1.8.0 PiperOrigin-RevId: 500768693 Source-Link: https://github.com/googleapis/googleapis/commit/190b612e3d0ff8f025875a669e5d68a1446d43c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7bf29a414b9ecac3170f0b65bdc2a95705c0ef1a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2JmMjlhNDE0YjllY2FjMzE3MGYwYjY1YmRjMmE5NTcwNWMwZWYxYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * require proto-plus 1.22.2 for python 3.11 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../services/error_group_service/async_client.py | 2 +- .../services/error_group_service/client.py | 2 +- .../services/error_stats_service/async_client.py | 2 +- .../services/error_stats_service/client.py | 2 +- .../services/report_errors_service/async_client.py | 2 +- .../services/report_errors_service/client.py | 2 +- ...etadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- setup.py | 2 ++ testing/constraints-3.12.txt | 6 ++++++ 9 files changed, 15 insertions(+), 7 deletions(-) create mode 100644 testing/constraints-3.12.txt diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index ac13ab34..a8f33bee 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -135,7 +135,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index b2224989..38f39a81 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -284,7 +284,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 84c42d3e..1b45e394 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -138,7 +138,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 39b6b5fd..ddcba84d 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -287,7 +287,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index abd5207b..cc5f39f3 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -130,7 +130,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
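
The endpoint resolution described in these docstrings can also be bypassed by passing an explicit ``api_endpoint``. A minimal sketch, assuming the API's public default endpoint and placeholder project/service names:

    from google.api_core.client_options import ClientOptions
    from google.cloud import errorreporting_v1beta1

    # An explicit api_endpoint takes precedence over the
    # GOOGLE_API_USE_CLIENT_CERTIFICATE-driven selection described above.
    client = errorreporting_v1beta1.ReportErrorsServiceClient(
        client_options=ClientOptions(api_endpoint="clouderrorreporting.googleapis.com"),
    )

    event = errorreporting_v1beta1.ReportedErrorEvent(
        service_context=errorreporting_v1beta1.ServiceContext(service="my-service"),
        message="RuntimeError: something went wrong",
        # Without a parseable stack trace in `message`, a report_location
        # identifies where the error was raised.
        context=errorreporting_v1beta1.ErrorContext(
            report_location=errorreporting_v1beta1.SourceLocation(
                file_path="app.py",
                line_number=42,
                function_name="main",
            ),
        ),
    )
    client.report_error_event(
        project_name="projects/my-project-123",
        event=event,
    )
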
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 984a993e..ca7d9a56 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -266,7 +266,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index b365b419..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.7.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/setup.py b/setup.py index 56ed1ec5..db8b4288 100644 --- a/setup.py +++ b/setup.py @@ -41,6 +41,7 @@ "google-cloud-logging>=1.14.0, <4.0.0dev", "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-error-reporting" @@ -80,6 +81,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt new file mode 100644 index 00000000..ed7f9aed --- /dev/null +++ b/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf From 0ec436b0404876574659697a7884c95fc4d85c27 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 10 Jan 2023 13:26:46 -0500 Subject: [PATCH 038/111] chore(main): release 1.8.0 (#406) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/error_reporting/gapic_version.py | 2 +- google/cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ...tadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 7588679c..3800c069 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.7.0" + ".": "1.8.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index d23ea5cf..62d5e28a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.8.0](https://github.com/googleapis/python-error-reporting/compare/v1.7.0...v1.8.0) (2023-01-10) + + +### Features + +* Add support for python 3.11 ([#405](https://github.com/googleapis/python-error-reporting/issues/405)) ([d5e0c4c](https://github.com/googleapis/python-error-reporting/commit/d5e0c4cf0cb40aab80146cad16141217b1998b30)) + ## [1.7.0](https://github.com/googleapis/python-error-reporting/compare/v1.6.3...v1.7.0) (2022-12-15) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index f033c615..b334dccf 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.8.0" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index f033c615..b334dccf 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.7.0" # {x-release-please-version} +__version__ = "1.8.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..29f4d27e 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.8.0" }, "snippets": [ { From 26a074998b0c0a0697ed03086f7e1f6c4b77e35a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Jan 2023 13:39:20 -0500 Subject: [PATCH 039/111] docs: Add documentation for enums (#407) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Add documentation for enums fix: Add context manager return types chore: Update gapic-generator-python to v1.8.1 PiperOrigin-RevId: 503210727 Source-Link: https://github.com/googleapis/googleapis/commit/a391fd1dac18dfdfa00c18c8404f2c3a6ff8e98e Source-Link: https://github.com/googleapis/googleapis-gen/commit/0080f830dec37c3384157082bce279e37079ea58 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/error_group_service/client.py | 2 +- .../services/error_stats_service/client.py | 2 +- .../services/report_errors_service/client.py | 2 +- .../errorreporting_v1beta1/types/common.py | 21 ++++++- .../types/error_stats_service.py | 63 ++++++++++++++++++- ....devtools.clouderrorreporting.v1beta1.json | 2 +- 6 files changed, 85 insertions(+), 7 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index 38f39a81..f52ea951 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -647,7 +647,7 @@ def sample_update_group(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "ErrorGroupServiceClient": return self def __exit__(self, type, value, traceback): diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index ddcba84d..1033a8c9 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -818,7 +818,7 @@ def sample_delete_events(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "ErrorStatsServiceClient": return self def __exit__(self, type, value, traceback): diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index ca7d9a56..86ea2339 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -557,7 +557,7 @@ def sample_report_error_event(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "ReportErrorsServiceClient": return self def __exit__(self, type, value, traceback): diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index e84ca351..2f04117b 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -36,7 +36,26 @@ class ResolutionStatus(proto.Enum): - r"""Resolution status of an error group.""" + r"""Resolution status of an error group. + + Values: + RESOLUTION_STATUS_UNSPECIFIED (0): + Status is unknown. When left unspecified in + requests, it is treated like OPEN. + OPEN (1): + The error group is not being addressed. This + is the default for new groups. It is also used + for errors re-occurring after marked RESOLVED. + ACKNOWLEDGED (2): + Error Group manually acknowledged, it can + have an issue link attached. + RESOLVED (3): + Error Group manually resolved, more events + for this group are not expected to occur. + MUTED (4): + The error group is muted and excluded by + default on group stats requests. + """ RESOLUTION_STATUS_UNSPECIFIED = 0 OPEN = 1 ACKNOWLEDGED = 2 diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index e28c7dde..60d0f90a 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -44,6 +44,25 @@ class TimedCountAlignment(proto.Enum): r"""Specifies how the time periods of error group counts are aligned. + + Values: + ERROR_COUNT_ALIGNMENT_UNSPECIFIED (0): + No alignment specified. + ALIGNMENT_EQUAL_ROUNDED (1): + The time periods shall be consecutive, have width equal to + the requested duration, and be aligned at the + ``alignment_time`` provided in the request. The + ``alignment_time`` does not have to be inside the query + period but even if it is outside, only time periods are + returned which overlap with the query period. A rounded + alignment will typically result in a different size of the + first or the last time period. + ALIGNMENT_EQUAL_AT_END (2): + The time periods shall be consecutive, have + width equal to the requested duration, and be + aligned at the end of the requested time period. + This can result in a different size of the first + time period. """ ERROR_COUNT_ALIGNMENT_UNSPECIFIED = 0 ALIGNMENT_EQUAL_ROUNDED = 1 @@ -51,7 +70,24 @@ class TimedCountAlignment(proto.Enum): class ErrorGroupOrder(proto.Enum): - r"""A sorting order of error groups.""" + r"""A sorting order of error groups. + + Values: + GROUP_ORDER_UNSPECIFIED (0): + No group order specified. + COUNT_DESC (1): + Total count of errors in the given time + window in descending order. + LAST_SEEN_DESC (2): + Timestamp when the group was last seen in the + given time window in descending order. 
+ CREATED_DESC (3): + Timestamp when the group was created in + descending order. + AFFECTED_USERS_DESC (4): + Number of affected users in the given time + window in descending order. + """ GROUP_ORDER_UNSPECIFIED = 0 COUNT_DESC = 1 LAST_SEEN_DESC = 2 @@ -440,7 +476,30 @@ class QueryTimeRange(proto.Message): """ class Period(proto.Enum): - r"""The supported time ranges.""" + r"""The supported time ranges. + + Values: + PERIOD_UNSPECIFIED (0): + Do not use. + PERIOD_1_HOUR (1): + Retrieve data for the last hour. + Recommended minimum timed count duration: 1 min. + PERIOD_6_HOURS (2): + Retrieve data for the last 6 hours. + Recommended minimum timed count duration: 10 + min. + PERIOD_1_DAY (3): + Retrieve data for the last day. + Recommended minimum timed count duration: 1 + hour. + PERIOD_1_WEEK (4): + Retrieve data for the last week. + Recommended minimum timed count duration: 6 + hours. + PERIOD_30_DAYS (5): + Retrieve data for the last 30 days. + Recommended minimum timed count duration: 1 day. + """ PERIOD_UNSPECIFIED = 0 PERIOD_1_HOUR = 1 PERIOD_6_HOURS = 2 diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index 29f4d27e..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.8.0" + "version": "0.1.0" }, "snippets": [ { From 67ba6c7b0b9342a2b9b1e9391514ee480691cab8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 10:49:12 -0500 Subject: [PATCH 040/111] chore(main): release 1.8.1 (#408) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 12 ++++++++++++ google/cloud/error_reporting/gapic_version.py | 2 +- google/cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ..._google.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 16 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 3800c069..064e56a8 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.8.0" + ".": "1.8.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 62d5e28a..2e074430 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.8.1](https://github.com/googleapis/python-error-reporting/compare/v1.8.0...v1.8.1) (2023-01-20) + + +### Bug Fixes + +* Add context manager return types ([26a0749](https://github.com/googleapis/python-error-reporting/commit/26a074998b0c0a0697ed03086f7e1f6c4b77e35a)) + + +### Documentation + +* Add documentation for enums ([26a0749](https://github.com/googleapis/python-error-reporting/commit/26a074998b0c0a0697ed03086f7e1f6c4b77e35a)) + ## [1.8.0](https://github.com/googleapis/python-error-reporting/compare/v1.7.0...v1.8.0) (2023-01-10) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index b334dccf..90e02933 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific 
language governing permissions and # limitations under the License. # -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "1.8.1" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index b334dccf..90e02933 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.0" # {x-release-please-version} +__version__ = "1.8.1" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..c3cb9985 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.8.1" }, "snippets": [ { From 1a720acedf71a41da5c1a17a405093c9d930aca8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 12:36:51 -0500 Subject: [PATCH 041/111] chore: Update gapic-generator-python to v1.8.2 (#409) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.2 PiperOrigin-RevId: 504289125 Source-Link: https://github.com/googleapis/googleapis/commit/38a48a44a44279e9cf9f2f864b588958a2d87491 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2dc22663dbe47a972c8d8c2f8a4df013dafdcbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJkYzIyNjYzZGJlNDdhOTcyYzhkOGMyZjhhNGRmMDEzZGFmZGNiYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove workaround Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .coveragerc | 1 + owlbot.py | 6 ------ ...etadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 3 files changed, 2 insertions(+), 7 deletions(-) diff --git a/.coveragerc b/.coveragerc index 67733e5c..9aac2710 100644 --- a/.coveragerc +++ b/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/errorreporting/__init__.py + google/cloud/errorreporting/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/owlbot.py b/owlbot.py index c96feb0a..0a02758f 100644 --- a/owlbot.py +++ b/owlbot.py @@ -35,12 +35,6 @@ shutil.rmtree("samples/generated_samples", ignore_errors=True) clean_up_generated_samples = False - # work around issue where google.cloud.errorreporting is not present - s.replace(library / "google/cloud/errorreporting_v1beta1/__init__.py", - "from google.cloud.errorreporting", - "from google.cloud.errorreporting_v1beta1" - ) - s.move( [library], excludes=[ diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index c3cb9985..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ 
b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.8.1" + "version": "0.1.0" }, "snippets": [ { From 71fb6146b38a61a9d24fdd1444cf04f74a45248a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jan 2023 16:42:13 +0000 Subject: [PATCH 042/111] chore: fix prerelease_deps nox session [autoapprove] (#410) Source-Link: https://togithub.com/googleapis/synthtool/commit/26c7505b2f76981ec1707b851e1595c8c06e90fc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 --- .github/.OwlBot.lock.yaml | 2 +- noxfile.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 889f77df..f0f3b24b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 diff --git a/noxfile.py b/noxfile.py index d1f24967..e9649dc6 100644 --- a/noxfile.py +++ b/noxfile.py @@ -188,9 +188,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. - # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -345,9 +345,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -377,8 +375,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", From 3a0d82db6a425b91430be0ee84fd9c957f39af00 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 4 Feb 2023 06:41:44 -0500 Subject: [PATCH 043/111] docs: removed link to the regionalization page (#411) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: removed link to the regionalization page PiperOrigin-RevId: 506926790 Source-Link: https://github.com/googleapis/googleapis/commit/7295c41168dfc72307767bcff534582e3ccc8d4d Source-Link: https://github.com/googleapis/googleapis-gen/commit/c132408dc7a2b4705f68990eb3208f74b552eb88 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzEzMjQwOGRjN2EyYjQ3MDVmNjg5OTBlYjMyMDhmNzRiNTUyZWI4OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/report_errors_service/async_client.py | 3 --- .../services/report_errors_service/client.py | 3 --- .../services/report_errors_service/transports/grpc.py | 3 --- .../services/report_errors_service/transports/grpc_asyncio.py | 3 --- 4 files changed, 12 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index cc5f39f3..7ccd15ec 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -237,9 +237,6 @@ async def report_error_event( in regional log buckets or logs routed to other Google Cloud projects. - For more information, see `Using Error Reporting with - regionalized logs `__. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 86ea2339..a7883f1f 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -443,9 +443,6 @@ def report_error_event( in regional log buckets or logs routed to other Google Cloud projects. - For more information, see `Using Error Reporting with - regionalized logs `__. - .. code-block:: python # This snippet has been automatically generated and should be regarded as a diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index fb212ee3..0ff8e435 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -252,9 +252,6 @@ def report_error_event( in regional log buckets or logs routed to other Google Cloud projects. - For more information, see `Using Error Reporting with - regionalized logs `__. 
- Returns: Callable[[~.ReportErrorEventRequest], ~.ReportErrorEventResponse]: diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index 44636c83..eea7a486 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -255,9 +255,6 @@ def report_error_event( in regional log buckets or logs routed to other Google Cloud projects. - For more information, see `Using Error Reporting with - regionalized logs `__. - Returns: Callable[[~.ReportErrorEventRequest], Awaitable[~.ReportErrorEventResponse]]: From 555474e8e2ee561b66261930b7c7077c64588678 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 7 Feb 2023 10:31:18 -0500 Subject: [PATCH 044/111] chore(main): release 1.8.2 (#412) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/error_reporting/gapic_version.py | 2 +- google/cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ...tadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 064e56a8..7017b7cf 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.8.1" + ".": "1.8.2" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 2e074430..cebcaae1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.8.2](https://github.com/googleapis/python-error-reporting/compare/v1.8.1...v1.8.2) (2023-02-04) + + +### Documentation + +* Removed link to the regionalization page ([#411](https://github.com/googleapis/python-error-reporting/issues/411)) ([3a0d82d](https://github.com/googleapis/python-error-reporting/commit/3a0d82db6a425b91430be0ee84fd9c957f39af00)) + ## [1.8.1](https://github.com/googleapis/python-error-reporting/compare/v1.8.0...v1.8.1) (2023-01-20) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index 90e02933..9a1b07ea 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.1" # {x-release-please-version} +__version__ = "1.8.2" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index 90e02933..9a1b07ea 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.8.1" # {x-release-please-version} +__version__ = "1.8.2" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..9a89b962 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.8.2" }, "snippets": [ { From 52f4d6b108e23e2c0e1e0780afdb4c35dfb8ef7f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 15:26:25 +0000 Subject: [PATCH 045/111] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#417) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.txt | 49 ++++++++++++++++++--------------------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f0f3b24b..894fb6bc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 05dc4672..096e4800 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - 
--hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From bfc7a4e42b40d1df436ee60f8b62eb34fa3c5138 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 11:55:23 -0500 Subject: [PATCH 046/111] chore(python): upgrade gcp-releasetool in .kokoro [autoapprove] (#419) Source-Link: https://github.com/googleapis/synthtool/commit/5f2a6089f73abf06238fe4310f6a14d6f6d1eed3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.in | 2 +- .kokoro/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 894fb6bc..5fc5daa3 100644 --- a/.github/.OwlBot.lock.yaml 
+++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index cbd7e77f..882178ce 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 096e4800..fa99c129 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -154,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ From b2b9eab649f413d7ad8a47fd660f9e2bcc32a820 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 28 Feb 2023 06:45:51 -0500 Subject: [PATCH 047/111] feat: enable "rest" transport in Python for services supporting numeric enums (#418) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: enable "rest" transport in Python for services supporting numeric enums PiperOrigin-RevId: 508143576 Source-Link: https://github.com/googleapis/googleapis/commit/7a702a989db3b413f39ff8994ca53fb38b6928c2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6ad1279c0e7aa787ac6b66c9fd4a210692edffcd Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmFkMTI3OWMwZTdhYTc4N2FjNmI2NmM5ZmQ0YTIxMDY5MmVkZmZjZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.8.5 PiperOrigin-RevId: 511892190 Source-Link: https://github.com/googleapis/googleapis/commit/a45d9c09c1287ffdf938f4e8083e791046c0b23b Source-Link: https://github.com/googleapis/googleapis-gen/commit/1907294b1d8365ea24f8c5f2e059a64124c4ed3b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTkwNzI5NGIxZDgzNjVlYTI0ZjhjNWYyZTA1OWE2NDEyNGM0ZWQzYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../gapic_metadata.json | 45 + .../services/error_group_service/client.py | 2 + .../transports/__init__.py | 5 + .../error_group_service/transports/rest.py | 447 +++++++ .../services/error_stats_service/client.py | 2 + .../transports/__init__.py | 5 + .../error_stats_service/transports/rest.py | 579 +++++++++ .../services/report_errors_service/client.py | 2 + .../transports/__init__.py | 5 + .../report_errors_service/transports/rest.py | 329 +++++ 
.../errorreporting_v1beta1/types/common.py | 2 + .../types/error_group_service.py | 2 + .../types/error_stats_service.py | 2 + .../types/report_errors_service.py | 2 + ....devtools.clouderrorreporting.v1beta1.json | 2 +- .../test_error_group_service.py | 644 +++++++++- .../test_error_stats_service.py | 1091 ++++++++++++++++- .../test_report_errors_service.py | 441 ++++++- 18 files changed, 3594 insertions(+), 13 deletions(-) create mode 100644 google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py create mode 100644 google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py create mode 100644 google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py diff --git a/google/cloud/errorreporting_v1beta1/gapic_metadata.json b/google/cloud/errorreporting_v1beta1/gapic_metadata.json index 1b6fc5e2..825275e6 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_metadata.json +++ b/google/cloud/errorreporting_v1beta1/gapic_metadata.json @@ -36,6 +36,21 @@ ] } } + }, + "rest": { + "libraryClient": "ErrorGroupServiceClient", + "rpcs": { + "GetGroup": { + "methods": [ + "get_group" + ] + }, + "UpdateGroup": { + "methods": [ + "update_group" + ] + } + } } } }, @@ -80,6 +95,26 @@ ] } } + }, + "rest": { + "libraryClient": "ErrorStatsServiceClient", + "rpcs": { + "DeleteEvents": { + "methods": [ + "delete_events" + ] + }, + "ListEvents": { + "methods": [ + "list_events" + ] + }, + "ListGroupStats": { + "methods": [ + "list_group_stats" + ] + } + } } } }, @@ -104,6 +139,16 @@ ] } } + }, + "rest": { + "libraryClient": "ReportErrorsServiceClient", + "rpcs": { + "ReportErrorEvent": { + "methods": [ + "report_error_event" + ] + } + } } } } diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index f52ea951..07f0aead 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -51,6 +51,7 @@ from .transports.base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import ErrorGroupServiceGrpcTransport from .transports.grpc_asyncio import ErrorGroupServiceGrpcAsyncIOTransport +from .transports.rest import ErrorGroupServiceRestTransport class ErrorGroupServiceClientMeta(type): @@ -66,6 +67,7 @@ class ErrorGroupServiceClientMeta(type): ) # type: Dict[str, Type[ErrorGroupServiceTransport]] _transport_registry["grpc"] = ErrorGroupServiceGrpcTransport _transport_registry["grpc_asyncio"] = ErrorGroupServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ErrorGroupServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py index 873035d9..b6d0629c 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py @@ -19,15 +19,20 @@ from .base import ErrorGroupServiceTransport from .grpc import ErrorGroupServiceGrpcTransport from .grpc_asyncio import ErrorGroupServiceGrpcAsyncIOTransport +from .rest import ErrorGroupServiceRestTransport +from .rest import ErrorGroupServiceRestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[ErrorGroupServiceTransport]] _transport_registry["grpc"] = ErrorGroupServiceGrpcTransport _transport_registry["grpc_asyncio"] = ErrorGroupServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ErrorGroupServiceRestTransport __all__ = ( "ErrorGroupServiceTransport", "ErrorGroupServiceGrpcTransport", "ErrorGroupServiceGrpcAsyncIOTransport", + "ErrorGroupServiceRestTransport", + "ErrorGroupServiceRestInterceptor", ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py new file mode 100644 index 00000000..a85ea6ea --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py @@ -0,0 +1,447 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.errorreporting_v1beta1.types import common +from google.cloud.errorreporting_v1beta1.types import error_group_service + +from .base import ( + ErrorGroupServiceTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ErrorGroupServiceRestInterceptor: + """Interceptor for ErrorGroupService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ErrorGroupServiceRestTransport. + + .. 
code-block:: python + class MyCustomErrorGroupServiceInterceptor(ErrorGroupServiceRestInterceptor): + def pre_get_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_group(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ErrorGroupServiceRestTransport(interceptor=MyCustomErrorGroupServiceInterceptor()) + client = ErrorGroupServiceClient(transport=transport) + + + """ + + def pre_get_group( + self, + request: error_group_service.GetGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[error_group_service.GetGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ErrorGroupService server. + """ + return request, metadata + + def post_get_group(self, response: common.ErrorGroup) -> common.ErrorGroup: + """Post-rpc interceptor for get_group + + Override in a subclass to manipulate the response + after it is returned by the ErrorGroupService server but before + it is returned to user code. + """ + return response + + def pre_update_group( + self, + request: error_group_service.UpdateGroupRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[error_group_service.UpdateGroupRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the ErrorGroupService server. + """ + return request, metadata + + def post_update_group(self, response: common.ErrorGroup) -> common.ErrorGroup: + """Post-rpc interceptor for update_group + + Override in a subclass to manipulate the response + after it is returned by the ErrorGroupService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ErrorGroupServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ErrorGroupServiceRestInterceptor + + +class ErrorGroupServiceRestTransport(ErrorGroupServiceTransport): + """REST backend transport for ErrorGroupService. + + Service for retrieving and updating individual error groups. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "clouderrorreporting.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ErrorGroupServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ErrorGroupServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _GetGroup(ErrorGroupServiceRestStub): + def __hash__(self): + return hash("GetGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: error_group_service.GetGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.ErrorGroup: + r"""Call the get group method over HTTP. + + Args: + request (~.error_group_service.GetGroupRequest): + The request object. A request to return an individual + group. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common.ErrorGroup: + Description of a group of similar + error events. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{group_name=projects/*/groups/*}", + }, + ] + request, metadata = self._interceptor.pre_get_group(request, metadata) + pb_request = error_group_service.GetGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.ErrorGroup() + pb_resp = common.ErrorGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_group(resp) + return resp + + class _UpdateGroup(ErrorGroupServiceRestStub): + def __hash__(self): + return hash("UpdateGroup") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: error_group_service.UpdateGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> common.ErrorGroup: + r"""Call the update group method over HTTP. + + Args: + request (~.error_group_service.UpdateGroupRequest): + The request object. A request to replace the existing + data for the given group. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.common.ErrorGroup: + Description of a group of similar + error events. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1beta1/{group.name=projects/*/groups/*}", + "body": "group", + }, + ] + request, metadata = self._interceptor.pre_update_group(request, metadata) + pb_request = error_group_service.UpdateGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = common.ErrorGroup() + pb_resp = common.ErrorGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_group(resp) + return resp + + @property + def get_group( + self, + ) -> Callable[[error_group_service.GetGroupRequest], common.ErrorGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_group( + self, + ) -> Callable[[error_group_service.UpdateGroupRequest], common.ErrorGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ErrorGroupServiceRestTransport",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 1033a8c9..ff3799e4 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -52,6 +52,7 @@ from .transports.base import ErrorStatsServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import ErrorStatsServiceGrpcTransport from .transports.grpc_asyncio import ErrorStatsServiceGrpcAsyncIOTransport +from .transports.rest import ErrorStatsServiceRestTransport class ErrorStatsServiceClientMeta(type): @@ -67,6 +68,7 @@ class ErrorStatsServiceClientMeta(type): ) # type: Dict[str, Type[ErrorStatsServiceTransport]] _transport_registry["grpc"] = ErrorStatsServiceGrpcTransport _transport_registry["grpc_asyncio"] = ErrorStatsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ErrorStatsServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py index 484e788e..b1b560f3 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py @@ -19,15 +19,20 @@ from .base import ErrorStatsServiceTransport from .grpc import ErrorStatsServiceGrpcTransport from .grpc_asyncio import ErrorStatsServiceGrpcAsyncIOTransport +from .rest import ErrorStatsServiceRestTransport +from .rest import ErrorStatsServiceRestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[ErrorStatsServiceTransport]] _transport_registry["grpc"] = ErrorStatsServiceGrpcTransport _transport_registry["grpc_asyncio"] = ErrorStatsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ErrorStatsServiceRestTransport __all__ = ( "ErrorStatsServiceTransport", "ErrorStatsServiceGrpcTransport", "ErrorStatsServiceGrpcAsyncIOTransport", + "ErrorStatsServiceRestTransport", + "ErrorStatsServiceRestInterceptor", ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py new file mode 100644 index 00000000..779f59c5 --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py @@ -0,0 +1,579 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
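The diff above completes the generated ErrorGroupServiceRestTransport and registers it alongside the gRPC transports, so the error group RPCs can now be served over HTTP/1.1 with JSON instead of gRPC. As an illustrative sketch, not part of the generated patch itself, a caller could opt into the new transport by passing the registered transport name to the client constructor. Application Default Credentials are assumed, and the project and group IDs below are placeholders:

    from google.cloud import errorreporting_v1beta1

    # Select the newly registered "rest" transport by name; gRPC remains the default.
    client = errorreporting_v1beta1.ErrorGroupServiceClient(transport="rest")

    # Placeholder resource name; real group names come from ListGroupStats results.
    # This maps to GET /v1beta1/{group_name=projects/*/groups/*} per the HTTP rule above.
    group = client.get_group(
        request={"group_name": "projects/my-project/groups/my-group-id"}
    )
    print(group.name, group.group_id)
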
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.errorreporting_v1beta1.types import error_stats_service + +from .base import ( + ErrorStatsServiceTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ErrorStatsServiceRestInterceptor: + """Interceptor for ErrorStatsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ErrorStatsServiceRestTransport. + + .. code-block:: python + class MyCustomErrorStatsServiceInterceptor(ErrorStatsServiceRestInterceptor): + def pre_delete_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_events(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_events(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_events(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_group_stats(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_group_stats(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ErrorStatsServiceRestTransport(interceptor=MyCustomErrorStatsServiceInterceptor()) + client = ErrorStatsServiceClient(transport=transport) + + + """ + + def pre_delete_events( + self, + request: error_stats_service.DeleteEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[error_stats_service.DeleteEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the ErrorStatsService server. + """ + return request, metadata + + def post_delete_events( + self, response: error_stats_service.DeleteEventsResponse + ) -> error_stats_service.DeleteEventsResponse: + """Post-rpc interceptor for delete_events + + Override in a subclass to manipulate the response + after it is returned by the ErrorStatsService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_events( + self, + request: error_stats_service.ListEventsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[error_stats_service.ListEventsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_events + + Override in a subclass to manipulate the request or metadata + before they are sent to the ErrorStatsService server. + """ + return request, metadata + + def post_list_events( + self, response: error_stats_service.ListEventsResponse + ) -> error_stats_service.ListEventsResponse: + """Post-rpc interceptor for list_events + + Override in a subclass to manipulate the response + after it is returned by the ErrorStatsService server but before + it is returned to user code. + """ + return response + + def pre_list_group_stats( + self, + request: error_stats_service.ListGroupStatsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[error_stats_service.ListGroupStatsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_group_stats + + Override in a subclass to manipulate the request or metadata + before they are sent to the ErrorStatsService server. + """ + return request, metadata + + def post_list_group_stats( + self, response: error_stats_service.ListGroupStatsResponse + ) -> error_stats_service.ListGroupStatsResponse: + """Post-rpc interceptor for list_group_stats + + Override in a subclass to manipulate the response + after it is returned by the ErrorStatsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ErrorStatsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ErrorStatsServiceRestInterceptor + + +class ErrorStatsServiceRestTransport(ErrorStatsServiceTransport): + """REST backend transport for ErrorStatsService. + + An API for retrieving and managing error statistics as well + as data for individual events. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "clouderrorreporting.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ErrorStatsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ErrorStatsServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _DeleteEvents(ErrorStatsServiceRestStub): + def __hash__(self): + return hash("DeleteEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: error_stats_service.DeleteEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> error_stats_service.DeleteEventsResponse: + r"""Call the delete events method over HTTP. + + Args: + request (~.error_stats_service.DeleteEventsRequest): + The request object. Deletes all events in the project. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.error_stats_service.DeleteEventsResponse: + Response message for deleting error + events. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta1/{project_name=projects/*}/events", + }, + ] + request, metadata = self._interceptor.pre_delete_events(request, metadata) + pb_request = error_stats_service.DeleteEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = error_stats_service.DeleteEventsResponse() + pb_resp = error_stats_service.DeleteEventsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_events(resp) + return resp + + class _ListEvents(ErrorStatsServiceRestStub): + def __hash__(self): + return hash("ListEvents") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "groupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: error_stats_service.ListEventsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> error_stats_service.ListEventsResponse: + r"""Call the list events method over HTTP. + + Args: + request (~.error_stats_service.ListEventsRequest): + The request object. Specifies a set of error events to + return. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.error_stats_service.ListEventsResponse: + Contains a set of requested error + events. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*}/events", + }, + ] + request, metadata = self._interceptor.pre_list_events(request, metadata) + pb_request = error_stats_service.ListEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = error_stats_service.ListEventsResponse() + pb_resp = error_stats_service.ListEventsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_events(resp) + return resp + + class _ListGroupStats(ErrorStatsServiceRestStub): + def __hash__(self): + return hash("ListGroupStats") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: error_stats_service.ListGroupStatsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> error_stats_service.ListGroupStatsResponse: + r"""Call the list group stats method over HTTP. + + Args: + request (~.error_stats_service.ListGroupStatsRequest): + The request object. Specifies a set of ``ErrorGroupStats`` to return. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.error_stats_service.ListGroupStatsResponse: + Contains a set of requested error + group stats. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*}/groupStats", + }, + ] + request, metadata = self._interceptor.pre_list_group_stats( + request, metadata + ) + pb_request = error_stats_service.ListGroupStatsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = error_stats_service.ListGroupStatsResponse() + pb_resp = error_stats_service.ListGroupStatsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_group_stats(resp) + return resp + + @property + def delete_events( + self, + ) -> Callable[ + [error_stats_service.DeleteEventsRequest], + error_stats_service.DeleteEventsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEvents(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_events( + self, + ) -> Callable[ + [error_stats_service.ListEventsRequest], error_stats_service.ListEventsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListEvents(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_group_stats( + self, + ) -> Callable[ + [error_stats_service.ListGroupStatsRequest], + error_stats_service.ListGroupStatsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListGroupStats(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ErrorStatsServiceRestTransport",) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index a7883f1f..f2d057b0 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -50,6 +50,7 @@ from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import ReportErrorsServiceGrpcTransport from .transports.grpc_asyncio import ReportErrorsServiceGrpcAsyncIOTransport +from .transports.rest import ReportErrorsServiceRestTransport class ReportErrorsServiceClientMeta(type): @@ -65,6 +66,7 @@ class ReportErrorsServiceClientMeta(type): ) # type: Dict[str, Type[ReportErrorsServiceTransport]] _transport_registry["grpc"] = ReportErrorsServiceGrpcTransport _transport_registry["grpc_asyncio"] = ReportErrorsServiceGrpcAsyncIOTransport + _transport_registry["rest"] = ReportErrorsServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py index b96eb345..289d0ff1 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py @@ -19,6 +19,8 @@ from .base import ReportErrorsServiceTransport from .grpc import ReportErrorsServiceGrpcTransport from .grpc_asyncio import ReportErrorsServiceGrpcAsyncIOTransport +from .rest import ReportErrorsServiceRestTransport +from .rest import ReportErrorsServiceRestInterceptor # Compile a registry of transports. @@ -27,9 +29,12 @@ ) # type: Dict[str, Type[ReportErrorsServiceTransport]] _transport_registry["grpc"] = ReportErrorsServiceGrpcTransport _transport_registry["grpc_asyncio"] = ReportErrorsServiceGrpcAsyncIOTransport +_transport_registry["rest"] = ReportErrorsServiceRestTransport __all__ = ( "ReportErrorsServiceTransport", "ReportErrorsServiceGrpcTransport", "ReportErrorsServiceGrpcAsyncIOTransport", + "ReportErrorsServiceRestTransport", + "ReportErrorsServiceRestInterceptor", ) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py new file mode 100644 index 00000000..cf238616 --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py @@ -0,0 +1,329 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.errorreporting_v1beta1.types import report_errors_service + +from .base import ( + ReportErrorsServiceTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class ReportErrorsServiceRestInterceptor: + """Interceptor for ReportErrorsService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ReportErrorsServiceRestTransport. + + .. code-block:: python + class MyCustomReportErrorsServiceInterceptor(ReportErrorsServiceRestInterceptor): + def pre_report_error_event(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_report_error_event(self, response): + logging.log(f"Received response: {response}") + return response + + transport = ReportErrorsServiceRestTransport(interceptor=MyCustomReportErrorsServiceInterceptor()) + client = ReportErrorsServiceClient(transport=transport) + + + """ + + def pre_report_error_event( + self, + request: report_errors_service.ReportErrorEventRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[ + report_errors_service.ReportErrorEventRequest, Sequence[Tuple[str, str]] + ]: + """Pre-rpc interceptor for report_error_event + + Override in a subclass to manipulate the request or metadata + before they are sent to the ReportErrorsService server. + """ + return request, metadata + + def post_report_error_event( + self, response: report_errors_service.ReportErrorEventResponse + ) -> report_errors_service.ReportErrorEventResponse: + """Post-rpc interceptor for report_error_event + + Override in a subclass to manipulate the response + after it is returned by the ReportErrorsService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class ReportErrorsServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ReportErrorsServiceRestInterceptor + + +class ReportErrorsServiceRestTransport(ReportErrorsServiceTransport): + """REST backend transport for ReportErrorsService. + + An API for reporting error events. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "clouderrorreporting.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ReportErrorsServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or ReportErrorsServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _ReportErrorEvent(ReportErrorsServiceRestStub):
+        def __hash__(self):
+            return hash("ReportErrorEvent")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: report_errors_service.ReportErrorEventRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> report_errors_service.ReportErrorEventResponse:
+            r"""Call the report error event method over HTTP.
+
+            Args:
+                request (~.report_errors_service.ReportErrorEventRequest):
+                    The request object. A request for reporting an individual
+                error event.
+
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.report_errors_service.ReportErrorEventResponse:
+                    Response for reporting an individual
+                error event. Data may be added to this
+                message in the future.
+
+            """
+
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1beta1/{project_name=projects/*}/events:report",
+                    "body": "event",
+                },
+            ]
+            request, metadata = self._interceptor.pre_report_error_event(
+                request, metadata
+            )
+            pb_request = report_errors_service.ReportErrorEventRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"],
+                including_default_value_fields=False,
+                use_integers_for_enums=True,
+            )
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+
+            # Jsonify the query params
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    including_default_value_fields=False,
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(self._get_unset_required_fields(query_params))
+
+            query_params["$alt"] = "json;enum-encoding=int"
+
+            # Send the request
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(self._session, method)(
+                "{host}{uri}".format(host=self._host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+                data=body,
+            )
+
+            # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+            # subclass.
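+            # For example, an HTTP 400 surfaces as core_exceptions.BadRequest and a
+            # 404 as core_exceptions.NotFound.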
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = report_errors_service.ReportErrorEventResponse() + pb_resp = report_errors_service.ReportErrorEventResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_report_error_event(resp) + return resp + + @property + def report_error_event( + self, + ) -> Callable[ + [report_errors_service.ReportErrorEventRequest], + report_errors_service.ReportErrorEventResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReportErrorEvent(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ReportErrorsServiceRestTransport",) diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index 2f04117b..43ac4ba2 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/google/cloud/errorreporting_v1beta1/types/error_group_service.py b/google/cloud/errorreporting_v1beta1/types/error_group_service.py index a3afd595..63ecabf9 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_group_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_group_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index 60d0f90a..afb2668a 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py index e154c5fe..a3087c1a 100644 --- a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py +++ b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index 9a89b962..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.8.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 994c484c..90643d98 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -100,6 +107,7 @@ def test__get_default_mtls_endpoint(): [ (ErrorGroupServiceClient, "grpc"), (ErrorGroupServiceAsyncClient, "grpc_asyncio"), + (ErrorGroupServiceClient, "rest"), ], ) def test_error_group_service_client_from_service_account_info( @@ -115,7 +123,11 @@ def test_error_group_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) @pytest.mark.parametrize( @@ -123,6 +135,7 @@ def test_error_group_service_client_from_service_account_info( [ (transports.ErrorGroupServiceGrpcTransport, "grpc"), (transports.ErrorGroupServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ErrorGroupServiceRestTransport, "rest"), ], ) def test_error_group_service_client_service_account_always_use_jwt( @@ -148,6 +161,7 @@ def test_error_group_service_client_service_account_always_use_jwt( [ (ErrorGroupServiceClient, "grpc"), (ErrorGroupServiceAsyncClient, "grpc_asyncio"), + (ErrorGroupServiceClient, "rest"), ], ) def test_error_group_service_client_from_service_account_file( @@ -170,13 +184,18 @@ def test_error_group_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) def test_error_group_service_client_get_transport_class(): transport = ErrorGroupServiceClient.get_transport_class() available_transports = [ transports.ErrorGroupServiceGrpcTransport, + transports.ErrorGroupServiceRestTransport, ] 
assert transport in available_transports @@ -193,6 +212,7 @@ def test_error_group_service_client_get_transport_class(): transports.ErrorGroupServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (ErrorGroupServiceClient, transports.ErrorGroupServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -348,6 +368,18 @@ def test_error_group_service_client_client_options( "grpc_asyncio", "false", ), + ( + ErrorGroupServiceClient, + transports.ErrorGroupServiceRestTransport, + "rest", + "true", + ), + ( + ErrorGroupServiceClient, + transports.ErrorGroupServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -547,6 +579,7 @@ def test_error_group_service_client_get_mtls_endpoint_and_cert_source(client_cla transports.ErrorGroupServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (ErrorGroupServiceClient, transports.ErrorGroupServiceRestTransport, "rest"), ], ) def test_error_group_service_client_client_options_scopes( @@ -587,6 +620,12 @@ def test_error_group_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + ErrorGroupServiceClient, + transports.ErrorGroupServiceRestTransport, + "rest", + None, + ), ], ) def test_error_group_service_client_client_options_credentials_file( @@ -1171,6 +1210,552 @@ async def test_update_group_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + error_group_service.GetGroupRequest, + dict, + ], +) +def test_get_group_rest(request_type): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"group_name": "projects/sample1/groups/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.ErrorGroup) + assert response.name == "name_value" + assert response.group_id == "group_id_value" + assert response.resolution_status == common.ResolutionStatus.OPEN + + +def test_get_group_rest_required_fields( + request_type=error_group_service.GetGroupRequest, +): + transport_class = transports.ErrorGroupServiceRestTransport + + request_init = {} + request_init["group_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["groupName"] = "group_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "groupName" in jsonified_request + assert jsonified_request["groupName"] == "group_name_value" + + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
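+            # With the real transcode(), a GetGroupRequest carrying a group_name such
+            # as "projects/sample1/groups/sample2" would become a GET on
+            # "/v1beta1/projects/sample1/groups/sample2" with no query params; the
+            # stubbed result below stands in for that.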
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_group_rest_unset_required_fields(): + transport = transports.ErrorGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("groupName",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_group_rest_interceptors(null_interceptor): + transport = transports.ErrorGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ErrorGroupServiceRestInterceptor(), + ) + client = ErrorGroupServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "post_get_group" + ) as post, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "pre_get_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = error_group_service.GetGroupRequest.pb( + error_group_service.GetGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.ErrorGroup.to_json(common.ErrorGroup()) + + request = error_group_service.GetGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.ErrorGroup() + + client.get_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_group_rest_bad_request( + transport: str = "rest", request_type=error_group_service.GetGroupRequest +): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"group_name": "projects/sample1/groups/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_group(request) + + +def test_get_group_rest_flattened(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {"group_name": "projects/sample1/groups/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + group_name="group_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{group_name=projects/*/groups/*}" % client.transport._host, + args[1], + ) + + +def test_get_group_rest_flattened_error(transport: str = "rest"): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_group( + error_group_service.GetGroupRequest(), + group_name="group_name_value", + ) + + +def test_get_group_rest_error(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + error_group_service.UpdateGroupRequest, + dict, + ], +) +def test_update_group_rest(request_type): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"group": {"name": "projects/sample1/groups/sample2"}} + request_init["group"] = { + "name": "projects/sample1/groups/sample2", + "group_id": "group_id_value", + "tracking_issues": [{"url": "url_value"}], + "resolution_status": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.ErrorGroup) + assert response.name == "name_value" + assert response.group_id == "group_id_value" + assert response.resolution_status == common.ResolutionStatus.OPEN + + +def test_update_group_rest_required_fields( + request_type=error_group_service.UpdateGroupRequest, +): + transport_class = transports.ErrorGroupServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
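+            # With the real transcode(), an UpdateGroupRequest would become a PUT on
+            # "/v1beta1/{group.name=projects/*/groups/*}" with the group message as
+            # the JSON body; the stubbed result below stands in for that.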
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_group_rest_unset_required_fields(): + transport = transports.ErrorGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("group",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_group_rest_interceptors(null_interceptor): + transport = transports.ErrorGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ErrorGroupServiceRestInterceptor(), + ) + client = ErrorGroupServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "post_update_group" + ) as post, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "pre_update_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = error_group_service.UpdateGroupRequest.pb( + error_group_service.UpdateGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = common.ErrorGroup.to_json(common.ErrorGroup()) + + request = error_group_service.UpdateGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.ErrorGroup() + + client.update_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_group_rest_bad_request( + transport: str = "rest", request_type=error_group_service.UpdateGroupRequest +): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"group": {"name": "projects/sample1/groups/sample2"}} + request_init["group"] = { + "name": "projects/sample1/groups/sample2", + "group_id": "group_id_value", + "tracking_issues": [{"url": "url_value"}], + "resolution_status": 1, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
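+    # The 400 status below is converted by core_exceptions.from_http_response into
+    # the BadRequest exception that the test expects.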
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_group(request) + + +def test_update_group_rest_flattened(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {"group": {"name": "projects/sample1/groups/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + group=common.ErrorGroup(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{group.name=projects/*/groups/*}" % client.transport._host, + args[1], + ) + + +def test_update_group_rest_flattened_error(transport: str = "rest"): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_group( + error_group_service.UpdateGroupRequest(), + group=common.ErrorGroup(name="name_value"), + ) + + +def test_update_group_rest_error(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.ErrorGroupServiceGrpcTransport( @@ -1252,6 +1837,7 @@ def test_transport_get_channel(): [ transports.ErrorGroupServiceGrpcTransport, transports.ErrorGroupServiceGrpcAsyncIOTransport, + transports.ErrorGroupServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1266,6 +1852,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1396,6 +1983,7 @@ def test_error_group_service_transport_auth_adc(transport_class): [ transports.ErrorGroupServiceGrpcTransport, transports.ErrorGroupServiceGrpcAsyncIOTransport, + transports.ErrorGroupServiceRestTransport, ], ) def test_error_group_service_transport_auth_gdch_credentials(transport_class): @@ -1495,11 +2083,23 @@ def test_error_group_service_grpc_transport_client_cert_source_for_mtls( ) +def test_error_group_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ErrorGroupServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_error_group_service_host_no_port(transport_name): @@ -1510,7 +2110,11 @@ def test_error_group_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) @pytest.mark.parametrize( @@ -1518,6 +2122,7 @@ def test_error_group_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_error_group_service_host_with_port(transport_name): @@ -1528,7 +2133,36 @@ def test_error_group_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouderrorreporting.googleapis.com:8000") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_error_group_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ErrorGroupServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ErrorGroupServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_group._session + session2 = client2.transport.get_group._session + assert session1 != session2 + session1 = client1.transport.update_group._session + session2 = client2.transport.update_group._session + assert session1 != session2 def test_error_group_service_grpc_transport_channel(): @@ -1822,6 +2456,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1839,6 +2474,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git 
a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index c5fbcf64..4ea47201 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -103,6 +110,7 @@ def test__get_default_mtls_endpoint(): [ (ErrorStatsServiceClient, "grpc"), (ErrorStatsServiceAsyncClient, "grpc_asyncio"), + (ErrorStatsServiceClient, "rest"), ], ) def test_error_stats_service_client_from_service_account_info( @@ -118,7 +126,11 @@ def test_error_stats_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) @pytest.mark.parametrize( @@ -126,6 +138,7 @@ def test_error_stats_service_client_from_service_account_info( [ (transports.ErrorStatsServiceGrpcTransport, "grpc"), (transports.ErrorStatsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ErrorStatsServiceRestTransport, "rest"), ], ) def test_error_stats_service_client_service_account_always_use_jwt( @@ -151,6 +164,7 @@ def test_error_stats_service_client_service_account_always_use_jwt( [ (ErrorStatsServiceClient, "grpc"), (ErrorStatsServiceAsyncClient, "grpc_asyncio"), + (ErrorStatsServiceClient, "rest"), ], ) def test_error_stats_service_client_from_service_account_file( @@ -173,13 +187,18 @@ def test_error_stats_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) def test_error_stats_service_client_get_transport_class(): transport = ErrorStatsServiceClient.get_transport_class() available_transports = [ transports.ErrorStatsServiceGrpcTransport, + transports.ErrorStatsServiceRestTransport, ] assert transport in available_transports @@ -196,6 +215,7 @@ def test_error_stats_service_client_get_transport_class(): transports.ErrorStatsServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (ErrorStatsServiceClient, transports.ErrorStatsServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -351,6 +371,18 @@ def test_error_stats_service_client_client_options( "grpc_asyncio", "false", ), + ( + ErrorStatsServiceClient, + transports.ErrorStatsServiceRestTransport, + "rest", + "true", + ), + ( + ErrorStatsServiceClient, + transports.ErrorStatsServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -550,6 +582,7 @@ def 
test_error_stats_service_client_get_mtls_endpoint_and_cert_source(client_cla transports.ErrorStatsServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (ErrorStatsServiceClient, transports.ErrorStatsServiceRestTransport, "rest"), ], ) def test_error_stats_service_client_client_options_scopes( @@ -590,6 +623,12 @@ def test_error_stats_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + ErrorStatsServiceClient, + transports.ErrorStatsServiceRestTransport, + "rest", + None, + ), ], ) def test_error_stats_service_client_client_options_credentials_file( @@ -1804,6 +1843,996 @@ async def test_delete_events_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + error_stats_service.ListGroupStatsRequest, + dict, + ], +) +def test_list_group_stats_rest(request_type): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.ListGroupStatsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_group_stats(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGroupStatsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_group_stats_rest_required_fields( + request_type=error_stats_service.ListGroupStatsRequest, +): + transport_class = transports.ErrorStatsServiceRestTransport + + request_init = {} + request_init["project_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_group_stats._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectName"] = "project_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_group_stats._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
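+    # For this GET method every optional field listed below travels as a query
+    # parameter, so none of them may overlap with the project_name path parameter.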
+ assert not set(unset_fields) - set( + ( + "alignment", + "alignment_time", + "group_id", + "order", + "page_size", + "page_token", + "service_filter", + "time_range", + "timed_count_duration", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectName" in jsonified_request + assert jsonified_request["projectName"] == "project_name_value" + + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = error_stats_service.ListGroupStatsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = error_stats_service.ListGroupStatsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_group_stats(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_group_stats_rest_unset_required_fields(): + transport = transports.ErrorStatsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_group_stats._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "alignment", + "alignmentTime", + "groupId", + "order", + "pageSize", + "pageToken", + "serviceFilter", + "timeRange", + "timedCountDuration", + ) + ) + & set(("projectName",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_group_stats_rest_interceptors(null_interceptor): + transport = transports.ErrorStatsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ErrorStatsServiceRestInterceptor(), + ) + client = ErrorStatsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "post_list_group_stats" + ) as post, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "pre_list_group_stats" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = error_stats_service.ListGroupStatsRequest.pb( + error_stats_service.ListGroupStatsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = 
error_stats_service.ListGroupStatsResponse.to_json( + error_stats_service.ListGroupStatsResponse() + ) + + request = error_stats_service.ListGroupStatsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = error_stats_service.ListGroupStatsResponse() + + client.list_group_stats( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_group_stats_rest_bad_request( + transport: str = "rest", request_type=error_stats_service.ListGroupStatsRequest +): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_group_stats(request) + + +def test_list_group_stats_rest_flattened(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.ListGroupStatsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_name": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_name="project_name_value", + time_range=error_stats_service.QueryTimeRange( + period=error_stats_service.QueryTimeRange.Period.PERIOD_1_HOUR + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_group_stats(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{project_name=projects/*}/groupStats" % client.transport._host, + args[1], + ) + + +def test_list_group_stats_rest_flattened_error(transport: str = "rest"): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
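The interceptor test above drives the pre/post hooks entirely through mocks. As a rough sketch of how the same hooks could be attached outside the test suite (Application Default Credentials assumed; the class and hook names are the ones exercised above, everything else is illustrative):

    from google.cloud import errorreporting_v1beta1
    from google.cloud.errorreporting_v1beta1.services.error_stats_service import (
        transports,
    )

    class LoggingInterceptor(transports.ErrorStatsServiceRestInterceptor):
        def pre_list_group_stats(self, request, metadata):
            # Runs before the HTTP request is sent; must return both values.
            print("listing group stats for", request.project_name)
            return request, metadata

        def post_list_group_stats(self, response):
            # Runs after the response is deserialized; must return the response.
            print("page contains", len(response.error_group_stats), "groups")
            return response

    transport = transports.ErrorStatsServiceRestTransport(
        interceptor=LoggingInterceptor()
    )
    client = errorreporting_v1beta1.ErrorStatsServiceClient(transport=transport)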
+ with pytest.raises(ValueError): + client.list_group_stats( + error_stats_service.ListGroupStatsRequest(), + project_name="project_name_value", + time_range=error_stats_service.QueryTimeRange( + period=error_stats_service.QueryTimeRange.Period.PERIOD_1_HOUR + ), + ) + + +def test_list_group_stats_rest_pager(transport: str = "rest"): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + error_stats_service.ListGroupStatsResponse( + error_group_stats=[ + error_stats_service.ErrorGroupStats(), + error_stats_service.ErrorGroupStats(), + error_stats_service.ErrorGroupStats(), + ], + next_page_token="abc", + ), + error_stats_service.ListGroupStatsResponse( + error_group_stats=[], + next_page_token="def", + ), + error_stats_service.ListGroupStatsResponse( + error_group_stats=[ + error_stats_service.ErrorGroupStats(), + ], + next_page_token="ghi", + ), + error_stats_service.ListGroupStatsResponse( + error_group_stats=[ + error_stats_service.ErrorGroupStats(), + error_stats_service.ErrorGroupStats(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + error_stats_service.ListGroupStatsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_name": "projects/sample1"} + + pager = client.list_group_stats(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, error_stats_service.ErrorGroupStats) for i in results) + + pages = list(client.list_group_stats(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + error_stats_service.ListEventsRequest, + dict, + ], +) +def test_list_events_rest(request_type): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.ListEventsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = error_stats_service.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_events(request) + + # Establish that the response is the type that we expect. 
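For orientation, the list_group_stats tests above (flattened call plus pager) correspond to roughly the following call pattern over the REST transport. The project ID is a placeholder and Application Default Credentials are assumed; QueryTimeRange and its Period enum are the same types used in the flattened test.

    from google.cloud import errorreporting_v1beta1

    client = errorreporting_v1beta1.ErrorStatsServiceClient(transport="rest")

    # The pager follows next_page_token across pages automatically.
    pager = client.list_group_stats(
        project_name="projects/my-project",  # placeholder project ID
        time_range=errorreporting_v1beta1.QueryTimeRange(
            period=errorreporting_v1beta1.QueryTimeRange.Period.PERIOD_1_HOUR
        ),
    )
    for group_stats in pager:
        print(group_stats.group.group_id, group_stats.count)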
+ assert isinstance(response, pagers.ListEventsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_events_rest_required_fields( + request_type=error_stats_service.ListEventsRequest, +): + transport_class = transports.ErrorStatsServiceRestTransport + + request_init = {} + request_init["project_name"] = "" + request_init["group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "groupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "groupId" in jsonified_request + assert jsonified_request["groupId"] == request_init["group_id"] + + jsonified_request["projectName"] = "project_name_value" + jsonified_request["groupId"] = "group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_events._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "group_id", + "page_size", + "page_token", + "service_filter", + "time_range", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectName" in jsonified_request + assert jsonified_request["projectName"] == "project_name_value" + assert "groupId" in jsonified_request + assert jsonified_request["groupId"] == "group_id_value" + + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = error_stats_service.ListEventsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = error_stats_service.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_events(request) + + expected_params = [ + ( + "groupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_events_rest_unset_required_fields(): + transport = transports.ErrorStatsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_events._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "groupId", + "pageSize", + "pageToken", + "serviceFilter", + "timeRange", + ) + ) + & set( + ( + "projectName", + "groupId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_events_rest_interceptors(null_interceptor): + transport = transports.ErrorStatsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ErrorStatsServiceRestInterceptor(), + ) + client = ErrorStatsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "post_list_events" + ) as post, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "pre_list_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = error_stats_service.ListEventsRequest.pb( + error_stats_service.ListEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = error_stats_service.ListEventsResponse.to_json( + error_stats_service.ListEventsResponse() + ) + + request = error_stats_service.ListEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = error_stats_service.ListEventsResponse() + + client.list_events( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_events_rest_bad_request( + transport: str = "rest", request_type=error_stats_service.ListEventsRequest +): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_events(request) + + +def test_list_events_rest_flattened(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.ListEventsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_name": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_name="project_name_value", + group_id="group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = error_stats_service.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_events(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{project_name=projects/*}/events" % client.transport._host, + args[1], + ) + + +def test_list_events_rest_flattened_error(transport: str = "rest"): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_events( + error_stats_service.ListEventsRequest(), + project_name="project_name_value", + group_id="group_id_value", + ) + + +def test_list_events_rest_pager(transport: str = "rest"): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + error_stats_service.ListEventsResponse( + error_events=[ + common.ErrorEvent(), + common.ErrorEvent(), + common.ErrorEvent(), + ], + next_page_token="abc", + ), + error_stats_service.ListEventsResponse( + error_events=[], + next_page_token="def", + ), + error_stats_service.ListEventsResponse( + error_events=[ + common.ErrorEvent(), + ], + next_page_token="ghi", + ), + error_stats_service.ListEventsResponse( + error_events=[ + common.ErrorEvent(), + common.ErrorEvent(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + error_stats_service.ListEventsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_name": "projects/sample1"} + + pager = client.list_events(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, common.ErrorEvent) for i in results) + + pages = list(client.list_events(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + error_stats_service.DeleteEventsRequest, + dict, + ], +) +def test_delete_events_rest(request_type): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.DeleteEventsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = error_stats_service.DeleteEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_events(request) + + # Establish that the response is the type that we expect. 
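The list_events pager test above maps to roughly this call pattern over REST (placeholder project and group IDs, Application Default Credentials assumed):

    from google.cloud import errorreporting_v1beta1

    client = errorreporting_v1beta1.ErrorStatsServiceClient(transport="rest")

    # group_id would normally come from a previously listed
    # ErrorGroupStats.group.group_id value.
    for event in client.list_events(
        project_name="projects/my-project",  # placeholder project ID
        group_id="example-group-id",         # placeholder group ID
    ):
        print(event.event_time, event.message)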
+ assert isinstance(response, error_stats_service.DeleteEventsResponse) + + +def test_delete_events_rest_required_fields( + request_type=error_stats_service.DeleteEventsRequest, +): + transport_class = transports.ErrorStatsServiceRestTransport + + request_init = {} + request_init["project_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectName"] = "project_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_events._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectName" in jsonified_request + assert jsonified_request["projectName"] == "project_name_value" + + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = error_stats_service.DeleteEventsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = error_stats_service.DeleteEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_events(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_events_rest_unset_required_fields(): + transport = transports.ErrorStatsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_events._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectName",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_events_rest_interceptors(null_interceptor): + transport = transports.ErrorStatsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ErrorStatsServiceRestInterceptor(), + ) + client = ErrorStatsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "post_delete_events" + ) as post, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "pre_delete_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = error_stats_service.DeleteEventsRequest.pb( + error_stats_service.DeleteEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = error_stats_service.DeleteEventsResponse.to_json( + error_stats_service.DeleteEventsResponse() + ) + + request = error_stats_service.DeleteEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = error_stats_service.DeleteEventsResponse() + + client.delete_events( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_events_rest_bad_request( + transport: str = "rest", request_type=error_stats_service.DeleteEventsRequest +): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_events(request) + + +def test_delete_events_rest_flattened(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.DeleteEventsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_name": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_name="project_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = error_stats_service.DeleteEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_events(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{project_name=projects/*}/events" % client.transport._host, + args[1], + ) + + +def test_delete_events_rest_flattened_error(transport: str = "rest"): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_events( + error_stats_service.DeleteEventsRequest(), + project_name="project_name_value", + ) + + +def test_delete_events_rest_error(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
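The delete_events tests above map to a single REST call that clears all error events for a project. A minimal sketch, with a placeholder project ID and Application Default Credentials assumed:

    from google.cloud import errorreporting_v1beta1

    client = errorreporting_v1beta1.ErrorStatsServiceClient(transport="rest")

    # Irreversibly deletes all error events stored for the project; the call
    # returns an (empty) DeleteEventsResponse on success.
    response = client.delete_events(project_name="projects/my-project")
    print(type(response).__name__)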
transport = transports.ErrorStatsServiceGrpcTransport( @@ -1885,6 +2914,7 @@ def test_transport_get_channel(): [ transports.ErrorStatsServiceGrpcTransport, transports.ErrorStatsServiceGrpcAsyncIOTransport, + transports.ErrorStatsServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1899,6 +2929,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -2030,6 +3061,7 @@ def test_error_stats_service_transport_auth_adc(transport_class): [ transports.ErrorStatsServiceGrpcTransport, transports.ErrorStatsServiceGrpcAsyncIOTransport, + transports.ErrorStatsServiceRestTransport, ], ) def test_error_stats_service_transport_auth_gdch_credentials(transport_class): @@ -2129,11 +3161,23 @@ def test_error_stats_service_grpc_transport_client_cert_source_for_mtls( ) +def test_error_stats_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ErrorStatsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_error_stats_service_host_no_port(transport_name): @@ -2144,7 +3188,11 @@ def test_error_stats_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) @pytest.mark.parametrize( @@ -2152,6 +3200,7 @@ def test_error_stats_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_error_stats_service_host_with_port(transport_name): @@ -2162,7 +3211,39 @@ def test_error_stats_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouderrorreporting.googleapis.com:8000") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_error_stats_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ErrorStatsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ErrorStatsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.list_group_stats._session + session2 = client2.transport.list_group_stats._session + assert session1 != session2 + session1 = client1.transport.list_events._session + session2 = client2.transport.list_events._session + assert session1 != session2 + session1 = client1.transport.delete_events._session + session2 = client2.transport.delete_events._session + assert session1 != session2 def test_error_stats_service_grpc_transport_channel(): @@ -2456,6 +3537,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -2473,6 +3555,7 @@ def test_transport_close(): def 
test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 85eb3987..3a69144f 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -103,6 +110,7 @@ def test__get_default_mtls_endpoint(): [ (ReportErrorsServiceClient, "grpc"), (ReportErrorsServiceAsyncClient, "grpc_asyncio"), + (ReportErrorsServiceClient, "rest"), ], ) def test_report_errors_service_client_from_service_account_info( @@ -118,7 +126,11 @@ def test_report_errors_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) @pytest.mark.parametrize( @@ -126,6 +138,7 @@ def test_report_errors_service_client_from_service_account_info( [ (transports.ReportErrorsServiceGrpcTransport, "grpc"), (transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.ReportErrorsServiceRestTransport, "rest"), ], ) def test_report_errors_service_client_service_account_always_use_jwt( @@ -151,6 +164,7 @@ def test_report_errors_service_client_service_account_always_use_jwt( [ (ReportErrorsServiceClient, "grpc"), (ReportErrorsServiceAsyncClient, "grpc_asyncio"), + (ReportErrorsServiceClient, "rest"), ], ) def test_report_errors_service_client_from_service_account_file( @@ -173,13 +187,18 @@ def test_report_errors_service_client_from_service_account_file( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) def test_report_errors_service_client_get_transport_class(): transport = ReportErrorsServiceClient.get_transport_class() available_transports = [ transports.ReportErrorsServiceGrpcTransport, + transports.ReportErrorsServiceRestTransport, ] assert transport in available_transports @@ -200,6 +219,11 @@ def test_report_errors_service_client_get_transport_class(): transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + ReportErrorsServiceClient, + transports.ReportErrorsServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( @@ -355,6 +379,18 @@ def test_report_errors_service_client_client_options( "grpc_asyncio", "false", ), + ( + ReportErrorsServiceClient, + transports.ReportErrorsServiceRestTransport, + "rest", + "true", + ), + ( + ReportErrorsServiceClient, + 
transports.ReportErrorsServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -558,6 +594,11 @@ def test_report_errors_service_client_get_mtls_endpoint_and_cert_source(client_c transports.ReportErrorsServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + ReportErrorsServiceClient, + transports.ReportErrorsServiceRestTransport, + "rest", + ), ], ) def test_report_errors_service_client_client_options_scopes( @@ -598,6 +639,12 @@ def test_report_errors_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + ReportErrorsServiceClient, + transports.ReportErrorsServiceRestTransport, + "rest", + None, + ), ], ) def test_report_errors_service_client_client_options_credentials_file( @@ -973,6 +1020,344 @@ async def test_report_error_event_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + report_errors_service.ReportErrorEventRequest, + dict, + ], +) +def test_report_error_event_rest(request_type): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request_init["event"] = { + "event_time": {"seconds": 751, "nanos": 543}, + "service_context": { + "service": "service_value", + "version": "version_value", + "resource_type": "resource_type_value", + }, + "message": "message_value", + "context": { + "http_request": { + "method": "method_value", + "url": "url_value", + "user_agent": "user_agent_value", + "referrer": "referrer_value", + "response_status_code": 2156, + "remote_ip": "remote_ip_value", + }, + "user": "user_value", + "report_location": { + "file_path": "file_path_value", + "line_number": 1168, + "function_name": "function_name_value", + }, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_errors_service.ReportErrorEventResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = report_errors_service.ReportErrorEventResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.report_error_event(request) + + # Establish that the response is the type that we expect. 
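The request_init dict above mirrors the ReportedErrorEvent message. Built with the typed messages instead of a dict, the equivalent client call looks roughly like this, assuming the common message types (ServiceContext, ErrorContext, SourceLocation) are exported at the package level as in current releases; the project ID is a placeholder, Application Default Credentials are assumed, and the field values echo the test fixture:

    from google.cloud import errorreporting_v1beta1

    client = errorreporting_v1beta1.ReportErrorsServiceClient(transport="rest")

    event = errorreporting_v1beta1.ReportedErrorEvent(
        service_context=errorreporting_v1beta1.ServiceContext(
            service="service_value",
            version="version_value",
        ),
        message="message_value",
        context=errorreporting_v1beta1.ErrorContext(
            report_location=errorreporting_v1beta1.SourceLocation(
                file_path="file_path_value",
                line_number=1168,
                function_name="function_name_value",
            ),
        ),
    )

    client.report_error_event(
        project_name="projects/my-project",  # placeholder project ID
        event=event,
    )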
+ assert isinstance(response, report_errors_service.ReportErrorEventResponse) + + +def test_report_error_event_rest_required_fields( + request_type=report_errors_service.ReportErrorEventRequest, +): + transport_class = transports.ReportErrorsServiceRestTransport + + request_init = {} + request_init["project_name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).report_error_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectName"] = "project_name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).report_error_event._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectName" in jsonified_request + assert jsonified_request["projectName"] == "project_name_value" + + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = report_errors_service.ReportErrorEventResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = report_errors_service.ReportErrorEventResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.report_error_event(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_report_error_event_rest_unset_required_fields(): + transport = transports.ReportErrorsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.report_error_event._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectName", + "event", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_report_error_event_rest_interceptors(null_interceptor): + transport = transports.ReportErrorsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportErrorsServiceRestInterceptor(), + ) + client = ReportErrorsServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportErrorsServiceRestInterceptor, "post_report_error_event" + ) as post, mock.patch.object( + transports.ReportErrorsServiceRestInterceptor, "pre_report_error_event" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = report_errors_service.ReportErrorEventRequest.pb( + report_errors_service.ReportErrorEventRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + report_errors_service.ReportErrorEventResponse.to_json( + report_errors_service.ReportErrorEventResponse() + ) + ) + + request = report_errors_service.ReportErrorEventRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_errors_service.ReportErrorEventResponse() + + client.report_error_event( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_report_error_event_rest_bad_request( + transport: str = "rest", request_type=report_errors_service.ReportErrorEventRequest +): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request_init["event"] = { + "event_time": {"seconds": 751, "nanos": 543}, + "service_context": { + "service": "service_value", + "version": "version_value", + "resource_type": "resource_type_value", + }, + "message": "message_value", + "context": { + "http_request": { + "method": "method_value", + "url": "url_value", + "user_agent": "user_agent_value", + 
"referrer": "referrer_value", + "response_status_code": 2156, + "remote_ip": "remote_ip_value", + }, + "user": "user_value", + "report_location": { + "file_path": "file_path_value", + "line_number": 1168, + "function_name": "function_name_value", + }, + }, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.report_error_event(request) + + +def test_report_error_event_rest_flattened(): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_errors_service.ReportErrorEventResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_name": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_name="project_name_value", + event=report_errors_service.ReportedErrorEvent( + event_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = report_errors_service.ReportErrorEventResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.report_error_event(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{project_name=projects/*}/events:report" + % client.transport._host, + args[1], + ) + + +def test_report_error_event_rest_flattened_error(transport: str = "rest"): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.report_error_event( + report_errors_service.ReportErrorEventRequest(), + project_name="project_name_value", + event=report_errors_service.ReportedErrorEvent( + event_time=timestamp_pb2.Timestamp(seconds=751) + ), + ) + + +def test_report_error_event_rest_error(): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.ReportErrorsServiceGrpcTransport( @@ -1054,6 +1439,7 @@ def test_transport_get_channel(): [ transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport, + transports.ReportErrorsServiceRestTransport, ], ) def test_transport_adc(transport_class): @@ -1068,6 +1454,7 @@ def test_transport_adc(transport_class): "transport_name", [ "grpc", + "rest", ], ) def test_transport_kind(transport_name): @@ -1195,6 +1582,7 @@ def test_report_errors_service_transport_auth_adc(transport_class): [ transports.ReportErrorsServiceGrpcTransport, transports.ReportErrorsServiceGrpcAsyncIOTransport, + transports.ReportErrorsServiceRestTransport, ], ) def test_report_errors_service_transport_auth_gdch_credentials(transport_class): @@ -1294,11 +1682,23 @@ def test_report_errors_service_grpc_transport_client_cert_source_for_mtls( ) +def test_report_errors_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ReportErrorsServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_report_errors_service_host_no_port(transport_name): @@ -1309,7 +1709,11 @@ def test_report_errors_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouderrorreporting.googleapis.com:443") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com" + ) @pytest.mark.parametrize( @@ -1317,6 +1721,7 @@ def test_report_errors_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_report_errors_service_host_with_port(transport_name): @@ -1327,7 +1732,33 @@ def test_report_errors_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("clouderrorreporting.googleapis.com:8000") + assert client.transport._host == ( + "clouderrorreporting.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://clouderrorreporting.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_report_errors_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ReportErrorsServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ReportErrorsServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.report_error_event._session + session2 = client2.transport.report_error_event._session + assert session1 != session2 def test_report_errors_service_grpc_transport_channel(): @@ -1598,6 +2029,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -1615,6 +2047,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: From 5dc03004a1dc1040e40ab57894cb9b8f3277c8e7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" 
<55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 28 Feb 2023 18:43:50 -0500 Subject: [PATCH 048/111] chore(main): release 1.9.0 (#420) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/error_reporting/gapic_version.py | 2 +- google/cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ...tadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 7017b7cf..44204dd0 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.8.2" + ".": "1.9.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index cebcaae1..dcf9c0bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.9.0](https://github.com/googleapis/python-error-reporting/compare/v1.8.2...v1.9.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#418](https://github.com/googleapis/python-error-reporting/issues/418)) ([b2b9eab](https://github.com/googleapis/python-error-reporting/commit/b2b9eab649f413d7ad8a47fd660f9e2bcc32a820)) + ## [1.8.2](https://github.com/googleapis/python-error-reporting/compare/v1.8.1...v1.8.2) (2023-02-04) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index 9a1b07ea..163d1511 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.8.2" # {x-release-please-version} +__version__ = "1.9.0" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index 9a1b07ea..163d1511 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.8.2" # {x-release-please-version} +__version__ = "1.9.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..fd06ad98 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.9.0" }, "snippets": [ { From 3ad04a0a5334d3de25f244d043cef15604e03123 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 08:32:09 -0400 Subject: [PATCH 049/111] chore(deps): Update nox in .kokoro/requirements.in [autoapprove] (#421) Source-Link: https://github.com/googleapis/synthtool/commit/92006bb3cdc84677aa93c7f5235424ec2b157146 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/requirements.in | 2 +- .kokoro/requirements.txt | 14 +++++--------- 3 files changed, 7 insertions(+), 11 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 5fc5daa3..b8edda51 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 + digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index 882178ce..ec867d9f 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -5,6 +5,6 @@ typing-extensions twine wheel setuptools -nox +nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index fa99c129..66a2172a 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # @@ -335,9 +335,9 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nox==2022.8.7 \ - --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ - --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c +nox==2022.11.21 \ + --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ + --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 # via -r requirements.in packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -380,10 +380,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core -py==1.11.0 \ - --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # 
via nox pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 561dc395302f7989820db79f1c82aad04b7f6f7c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 23 Mar 2023 09:28:07 -0400 Subject: [PATCH 050/111] docs: Fix formatting of request arg in docstring (#422) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Fix formatting of request arg in docstring chore: Update gapic-generator-python to v1.9.1 PiperOrigin-RevId: 518604533 Source-Link: https://github.com/googleapis/googleapis/commit/8a085aeddfa010af5bcef090827aac5255383d7e Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2ab4b0a0ae2907e812c209198a74e0898afcb04 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJhYjRiMGEwYWUyOTA3ZTgxMmMyMDkxOThhNzRlMDg5OGFmY2IwNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/error_group_service/transports/rest.py | 2 -- .../services/error_stats_service/async_client.py | 3 +-- .../services/error_stats_service/client.py | 3 +-- .../services/error_stats_service/transports/rest.py | 1 - .../services/report_errors_service/async_client.py | 4 ++-- .../services/report_errors_service/client.py | 4 ++-- .../services/report_errors_service/transports/rest.py | 1 - ..._metadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 8 files changed, 7 insertions(+), 13 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py index a85ea6ea..dcd0aefa 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py @@ -259,7 +259,6 @@ def __call__( request (~.error_group_service.GetGroupRequest): The request object. A request to return an individual group. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -349,7 +348,6 @@ def __call__( request (~.error_group_service.UpdateGroupRequest): The request object. A request to replace the existing data for the given group. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 1b45e394..6fb08857 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -261,8 +261,7 @@ async def sample_list_group_stats(): Args: request (Optional[Union[google.cloud.errorreporting_v1beta1.types.ListGroupStatsRequest, dict]]): - The request object. Specifies a set of `ErrorGroupStats` - to return. + The request object. Specifies a set of ``ErrorGroupStats`` to return. project_name (:class:`str`): Required. The resource name of the Google Cloud Platform project. 
Written as ``projects/{projectID}`` or diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index ff3799e4..629a6ea6 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -483,8 +483,7 @@ def sample_list_group_stats(): Args: request (Union[google.cloud.errorreporting_v1beta1.types.ListGroupStatsRequest, dict]): - The request object. Specifies a set of `ErrorGroupStats` - to return. + The request object. Specifies a set of ``ErrorGroupStats`` to return. project_name (str): Required. The resource name of the Google Cloud Platform project. Written as ``projects/{projectID}`` or diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py index 779f59c5..a32ef79c 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py @@ -384,7 +384,6 @@ def __call__( request (~.error_stats_service.ListEventsRequest): The request object. Specifies a set of error events to return. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 7ccd15ec..bbaf19b8 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -269,8 +269,8 @@ async def sample_report_error_event(): Args: request (Optional[Union[google.cloud.errorreporting_v1beta1.types.ReportErrorEventRequest, dict]]): - The request object. A request for reporting an - individual error event. + The request object. A request for reporting an individual + error event. project_name (:class:`str`): Required. The resource name of the Google Cloud Platform project. Written as ``projects/{projectId}``, where diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index f2d057b0..810b2aba 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -477,8 +477,8 @@ def sample_report_error_event(): Args: request (Union[google.cloud.errorreporting_v1beta1.types.ReportErrorEventRequest, dict]): - The request object. A request for reporting an - individual error event. + The request object. A request for reporting an individual + error event. project_name (str): Required. The resource name of the Google Cloud Platform project. 
Written as ``projects/{projectId}``, where diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py index cf238616..dd460cf2 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py @@ -233,7 +233,6 @@ def __call__( request (~.report_errors_service.ReportErrorEventRequest): The request object. A request for reporting an individual error event. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fd06ad98..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.9.0" + "version": "0.1.0" }, "snippets": [ { From ebc33edbaf3518ec930d66fe9d671870fbd508fb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 27 Mar 2023 11:57:53 -0400 Subject: [PATCH 051/111] chore(main): release 1.9.1 (#423) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/error_reporting/gapic_version.py | 2 +- google/cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ...tadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 44204dd0..627fb5f1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.9.0" + ".": "1.9.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index dcf9c0bd..c8d4b09f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.9.1](https://github.com/googleapis/python-error-reporting/compare/v1.9.0...v1.9.1) (2023-03-23) + + +### Documentation + +* Fix formatting of request arg in docstring ([#422](https://github.com/googleapis/python-error-reporting/issues/422)) ([561dc39](https://github.com/googleapis/python-error-reporting/commit/561dc395302f7989820db79f1c82aad04b7f6f7c)) + ## [1.9.0](https://github.com/googleapis/python-error-reporting/compare/v1.8.2...v1.9.0) (2023-02-28) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index 163d1511..13f1b4a7 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
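The docstring edits in PATCH 050/111 above reformat the request-argument descriptions for ListGroupStats and ReportErrorEvent. For orientation, a minimal sketch of calling the sync client whose docstrings are touched — the placeholder project ID, the request-dict call form, and the printed fields are illustrative assumptions, not taken from these patches:

    from google.cloud import errorreporting_v1beta1

    # Illustrative sketch only; "my-project" is a placeholder project ID.
    client = errorreporting_v1beta1.ErrorStatsServiceClient()

    # list_group_stats also accepts the flattened project_name argument shown
    # in the docstring above; the request-dict form is used here for brevity.
    for group_stats in client.list_group_stats(
        request={"project_name": "projects/my-project"}
    ):
        # group and count are fields of ErrorGroupStats (assumed here; they
        # are not part of the diff above).
        print(group_stats.group.group_id, group_stats.count)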
# -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.9.1" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index 163d1511..13f1b4a7 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.0" # {x-release-please-version} +__version__ = "1.9.1" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..bf184d5a 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.9.1" }, "snippets": [ { From 3a7dbd181e9e464c2e3bb078f62e568d4ac09afa Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 12:39:35 -0400 Subject: [PATCH 052/111] build(deps): bump requests from 2.28.1 to 2.31.0 in /synthtool/gcp/templates/python_library/.kokoro (#427) Source-Link: https://github.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 3 ++- .kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b8edda51..32b3c486 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b +# created: 2023-05-25T14:56:16.294623272Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 66a2172a..3b8d7ee8 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -419,9 +419,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core From 3974fecf5a7b61d4587dd2649077690d3e1aed0c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 3 Jun 2023 19:15:26 -0400 Subject: [PATCH 053/111] build(deps): bump cryptography from 39.0.1 to 41.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#429) Source-Link: https://github.com/googleapis/synthtool/commit/d0f51a0c2a9a6bcca86911eabea9e484baadf64b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 42 +++++++++++++++++++-------------------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 32b3c486..02a4dedc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b -# created: 2023-05-25T14:56:16.294623272Z + digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc +# created: 2023-06-03T21:25:37.968717478Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 3b8d7ee8..c7929db6 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + 
--hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage From 2630357ea4a869efbfdcdb721a84ec44aa25f560 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 28 Jun 2023 11:53:57 -0400 Subject: [PATCH 054/111] chore: remove pinned Sphinx version [autoapprove] (#431) Source-Link: https://github.com/googleapis/synthtool/commit/909573ce9da2819eeb835909c795d29aea5c724e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- noxfile.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 02a4dedc..1b3cb6c5 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc -# created: 2023-06-03T21:25:37.968717478Z + digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b +# created: 2023-06-27T13:04:21.96690344Z diff --git a/noxfile.py b/noxfile.py index e9649dc6..1fa00483 100644 --- a/noxfile.py +++ b/noxfile.py @@ -304,10 +304,9 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", - "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From 78e3d3a52ce4b9862b50257fb99b93aabeba3130 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Jun 2023 12:30:14 -0400 Subject: [PATCH 055/111] chore: store artifacts in placer (#433) Source-Link: https://github.com/googleapis/synthtool/commit/cb960373d12d20f8dc38beee2bf884d49627165e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/release/common.cfg | 9 +++++++++ noxfile.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 1b3cb6c5..98994f47 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b -# created: 2023-06-27T13:04:21.96690344Z + digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd +# created: 2023-06-28T17:03:33.371210701Z diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 5e979115..5c1ce9b6 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -39,6 +39,15 @@ env_vars: { value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } +# Store the packages we uploaded to PyPI. That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. +action { + define_artifacts { + regex: "github/python-error-reporting/**/*.tar.gz" + strip_prefix: "github/python-error-reporting" + } +} + ############################################# # this section merged from .kokoro/common_env_vars.cfg using owlbot.py diff --git a/noxfile.py b/noxfile.py index 1fa00483..715640ff 100644 --- a/noxfile.py +++ b/noxfile.py @@ -378,6 +378,7 @@ def prerelease_deps(session): "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", + "google-auth", "proto-plus", "google-cloud-testutils", # dependencies of google-cloud-testutils" @@ -390,7 +391,6 @@ def prerelease_deps(session): # Remaining dependencies other_deps = [ "requests", - "google-auth", ] session.install(*other_deps) From 319fa54a19cb9aca3a079439fd7c6bd2c3163f71 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 13:43:45 -0400 Subject: [PATCH 056/111] fix: Add async context manager return types (#434) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Add async context manager return types chore: Mock return_value should not populate oneof message fields chore: Support snippet generation for services that only support REST transport chore: Update gapic-generator-python to v1.11.0 PiperOrigin-RevId: 545430278 Source-Link: https://github.com/googleapis/googleapis/commit/601b5326107eeb74800b426d1f9933faa233258a Source-Link: https://github.com/googleapis/googleapis-gen/commit/b3f18d0f6560a855022fd058865e7620479d7af9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjNmMThkMGY2NTYwYTg1NTAyMmZkMDU4ODY1ZTc2MjA0NzlkN2FmOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/error_group_service/async_client.py | 2 +- .../services/error_stats_service/async_client.py | 2 +- .../services/report_errors_service/async_client.py | 2 +- ..._google.devtools.clouderrorreporting.v1beta1.json | 2 +- .../test_error_stats_service.py | 12 ++++++++---- 5 files changed, 12 insertions(+), 8 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index a8f33bee..bdc59765 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -427,7 +427,7 @@ async def sample_update_group(): # Done; return the response. 
return response - async def __aenter__(self): + async def __aenter__(self) -> "ErrorGroupServiceAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 6fb08857..9bbfbcfd 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -597,7 +597,7 @@ async def sample_delete_events(): # Done; return the response. return response - async def __aenter__(self): + async def __aenter__(self) -> "ErrorStatsServiceAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index bbaf19b8..cfcad25b 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -348,7 +348,7 @@ async def sample_report_error_event(): # Done; return the response. return response - async def __aenter__(self): + async def __aenter__(self) -> "ReportErrorsServiceAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index bf184d5a..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.9.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 4ea47201..76143e09 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1178,9 +1178,11 @@ async def test_list_group_stats_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_group_stats(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1608,9 +1610,11 @@ async def test_list_events_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_events(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token From 597eb02cf53fbf5109ca4c70b8f09c39f88b26c2 Mon Sep 17 00:00:00 2001 From: 
"release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Jul 2023 11:41:42 -0400 Subject: [PATCH 057/111] chore(main): release 1.9.2 (#435) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/error_reporting/gapic_version.py | 2 +- google/cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ...tadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 627fb5f1..50fc4585 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.9.1" + ".": "1.9.2" } diff --git a/CHANGELOG.md b/CHANGELOG.md index c8d4b09f..84fe7f1d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.9.2](https://github.com/googleapis/python-error-reporting/compare/v1.9.1...v1.9.2) (2023-07-04) + + +### Bug Fixes + +* Add async context manager return types ([#434](https://github.com/googleapis/python-error-reporting/issues/434)) ([319fa54](https://github.com/googleapis/python-error-reporting/commit/319fa54a19cb9aca3a079439fd7c6bd2c3163f71)) + ## [1.9.1](https://github.com/googleapis/python-error-reporting/compare/v1.9.0...v1.9.1) (2023-03-23) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index 13f1b4a7..ab4e0f59 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.1" # {x-release-please-version} +__version__ = "1.9.2" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index 13f1b4a7..ab4e0f59 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.1" # {x-release-please-version} +__version__ = "1.9.2" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..631b8bce 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.9.2" }, "snippets": [ { From cc695239f162cc79ba743e131af5e506f07e5162 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 09:12:57 -0400 Subject: [PATCH 058/111] chore: Update gapic-generator-python to v1.11.2 (#436) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.2 PiperOrigin-RevId: 546510849 Source-Link: https://github.com/googleapis/googleapis/commit/736073ad9a9763a170eceaaa54519bcc0ea55a5e Source-Link: https://github.com/googleapis/googleapis-gen/commit/deb64e8ec19d141e31089fe932b3a997ad541c4d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGViNjRlOGVjMTlkMTQxZTMxMDg5ZmU5MzJiM2E5OTdhZDU0MWM0ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/errorreporting_v1beta1/__init__.py | 2 +- google/cloud/errorreporting_v1beta1/services/__init__.py | 2 +- .../services/error_group_service/__init__.py | 2 +- .../services/error_group_service/async_client.py | 2 +- .../services/error_group_service/client.py | 2 +- .../services/error_group_service/transports/__init__.py | 2 +- .../services/error_group_service/transports/base.py | 2 +- .../services/error_group_service/transports/grpc.py | 2 +- .../services/error_group_service/transports/grpc_asyncio.py | 2 +- .../services/error_group_service/transports/rest.py | 2 +- .../services/error_stats_service/__init__.py | 2 +- .../services/error_stats_service/async_client.py | 2 +- .../services/error_stats_service/client.py | 2 +- .../services/error_stats_service/pagers.py | 2 +- .../services/error_stats_service/transports/__init__.py | 2 +- .../services/error_stats_service/transports/base.py | 2 +- .../services/error_stats_service/transports/grpc.py | 2 +- .../services/error_stats_service/transports/grpc_asyncio.py | 2 +- .../services/error_stats_service/transports/rest.py | 2 +- .../services/report_errors_service/__init__.py | 2 +- .../services/report_errors_service/async_client.py | 2 +- .../services/report_errors_service/client.py | 2 +- .../services/report_errors_service/transports/__init__.py | 2 +- .../services/report_errors_service/transports/base.py | 2 +- .../services/report_errors_service/transports/grpc.py | 2 +- .../services/report_errors_service/transports/grpc_asyncio.py | 2 +- .../services/report_errors_service/transports/rest.py | 2 +- google/cloud/errorreporting_v1beta1/types/__init__.py | 2 +- google/cloud/errorreporting_v1beta1/types/common.py | 2 +- .../cloud/errorreporting_v1beta1/types/error_group_service.py | 2 +- .../cloud/errorreporting_v1beta1/types/error_stats_service.py | 2 +- .../cloud/errorreporting_v1beta1/types/report_errors_service.py | 2 +- 
...ing_v1beta1_generated_error_group_service_get_group_async.py | 2 +- ...ting_v1beta1_generated_error_group_service_get_group_sync.py | 2 +- ..._v1beta1_generated_error_group_service_update_group_async.py | 2 +- ...g_v1beta1_generated_error_group_service_update_group_sync.py | 2 +- ...v1beta1_generated_error_stats_service_delete_events_async.py | 2 +- ..._v1beta1_generated_error_stats_service_delete_events_sync.py | 2 +- ...g_v1beta1_generated_error_stats_service_list_events_async.py | 2 +- ...ng_v1beta1_generated_error_stats_service_list_events_sync.py | 2 +- ...eta1_generated_error_stats_service_list_group_stats_async.py | 2 +- ...beta1_generated_error_stats_service_list_group_stats_sync.py | 2 +- ..._generated_report_errors_service_report_error_event_async.py | 2 +- ...1_generated_report_errors_service_report_error_event_sync.py | 2 +- ...et_metadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- scripts/fixup_errorreporting_v1beta1_keywords.py | 2 +- tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/gapic/__init__.py | 2 +- tests/unit/gapic/errorreporting_v1beta1/__init__.py | 2 +- .../gapic/errorreporting_v1beta1/test_error_group_service.py | 2 +- .../gapic/errorreporting_v1beta1/test_error_stats_service.py | 2 +- .../gapic/errorreporting_v1beta1/test_report_errors_service.py | 2 +- 53 files changed, 53 insertions(+), 53 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/__init__.py b/google/cloud/errorreporting_v1beta1/__init__.py index 32bd5db9..e143641a 100644 --- a/google/cloud/errorreporting_v1beta1/__init__.py +++ b/google/cloud/errorreporting_v1beta1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/__init__.py b/google/cloud/errorreporting_v1beta1/services/__init__.py index e8e1c384..89a37dc9 100644 --- a/google/cloud/errorreporting_v1beta1/services/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py index 62563e79..96ac56f8 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index bdc59765..0d91c2fd 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index 07f0aead..34b0119b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py index b6d0629c..91a3752f 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py index 665ee116..3cbc6416 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index 4145ab7b..d97d8ad8 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py index 69497298..0a1d829b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py index dcd0aefa..71c259c2 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py index b402e01a..3eda02c7 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 9bbfbcfd..bb758420 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 629a6ea6..75722883 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py index 9fe297c6..9170d159 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py index b1b560f3..f8a5fe87 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py index a2546b27..3899648b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index 21e0d0c2..4e751ed4 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py index b51a2ccb..b9b1be5f 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py index a32ef79c..b32a340e 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py index 5efbd937..9b8290ad 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index cfcad25b..929bac2c 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 810b2aba..60a9b15b 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py index 289d0ff1..d4d23aaf 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py index 77dc674a..54a7eda6 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index 0ff8e435..e81bad5c 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index eea7a486..d779e686 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py index dd460cf2..3a959003 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/__init__.py b/google/cloud/errorreporting_v1beta1/types/__init__.py index fed4105b..9b575327 100644 --- a/google/cloud/errorreporting_v1beta1/types/__init__.py +++ b/google/cloud/errorreporting_v1beta1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index 43ac4ba2..c8fc2a1b 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/error_group_service.py b/google/cloud/errorreporting_v1beta1/types/error_group_service.py index 63ecabf9..daecedde 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_group_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_group_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index afb2668a..165d800b 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py index a3087c1a..7d6b5eee 100644 --- a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py +++ b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py index b4a207ad..702f5cda 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py index e4f473de..074978d9 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py index 6ee4a823..8b345298 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py index 4f8f6bca..7c38b173 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py index bc8eb7be..a1cd90bf 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py index c03edfb3..f8c08c1b 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py index 0cd82da6..1ec25ba7 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py index dbed18fa..81c7cc4c 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py index c78b943f..60e47686 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py index a90ceaef..16d09dc4 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py index 400a567a..ba590771 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py index 2260dd1b..c1fb7124 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index 631b8bce..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.9.2" + "version": "0.1.0" }, "snippets": [ { diff --git a/scripts/fixup_errorreporting_v1beta1_keywords.py b/scripts/fixup_errorreporting_v1beta1_keywords.py index db88785c..813f0be6 100644 --- a/scripts/fixup_errorreporting_v1beta1_keywords.py +++ b/scripts/fixup_errorreporting_v1beta1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/__init__.py b/tests/__init__.py index e8e1c384..89a37dc9 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index e8e1c384..89a37dc9 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index e8e1c384..89a37dc9 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/__init__.py b/tests/unit/gapic/errorreporting_v1beta1/__init__.py index e8e1c384..89a37dc9 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/__init__.py +++ b/tests/unit/gapic/errorreporting_v1beta1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 90643d98..8f4bc53d 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 76143e09..d2e77d24 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 3a69144f..9824fea4 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
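The headers bumped above belong to the GAPIC-generated snippets under samples/generated_samples/. For orientation only, a minimal sketch of the report_error_event snippet in that family — the project name and field values below are placeholders, not taken from this patch::

    from google.cloud import errorreporting_v1beta1

    def sample_report_error_event():
        # Create a client for the Report Errors service.
        client = errorreporting_v1beta1.ReportErrorsServiceClient()

        # Build the error event; a real report also needs
        # event.service_context.service to be set.
        event = errorreporting_v1beta1.ReportedErrorEvent()
        event.message = "example error message"

        request = errorreporting_v1beta1.ReportErrorEventRequest(
            project_name="projects/sample-project",  # placeholder project
            event=event,
        )

        # Send the event; the response message is empty on success.
        response = client.report_error_event(request=request)
        print(response)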
From ad44288d39d7f8105214e5f892853178988ca143 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jul 2023 10:19:04 -0400 Subject: [PATCH 059/111] chore: Update gapic-generator-python to v1.11.4 (#437) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.4 PiperOrigin-RevId: 547897126 Source-Link: https://github.com/googleapis/googleapis/commit/c09c75e087d8f9a2d466b4aaad7dd2926b5ead5a Source-Link: https://github.com/googleapis/googleapis-gen/commit/45e0ec4343517cd0aa66b5ca64232a1802c2f945 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDVlMGVjNDM0MzUxN2NkMGFhNjZiNWNhNjQyMzJhMTgwMmMyZjk0NSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/errorreporting_v1beta1/types/common.py | 1 + google/cloud/errorreporting_v1beta1/types/error_stats_service.py | 1 + 2 files changed, 2 insertions(+) diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index c8fc2a1b..947b89a3 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -189,6 +189,7 @@ class ServiceContext(proto.Message): resource_type (str): Type of the MonitoredResource. List of possible values: + https://cloud.google.com/monitoring/api/resources Value is set automatically for incoming errors and must not be set when reporting errors. diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index 165d800b..cb0a5091 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -264,6 +264,7 @@ class ErrorGroupStats(proto.Message): Approximate number of occurrences over time. Timed counts returned by ListGroups are guaranteed to be: + - Inside the requested time interval - Non-overlapping, and - Ordered by ascending time. 
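The docstring change above spells out the guarantees on the timed_counts returned by ListGroupStats. A minimal usage sketch against the generated ErrorStatsServiceClient, with a placeholder project name that is not part of this patch::

    from google.cloud import errorreporting_v1beta1

    def sample_list_group_stats():
        client = errorreporting_v1beta1.ErrorStatsServiceClient()

        request = errorreporting_v1beta1.ListGroupStatsRequest(
            project_name="projects/sample-project",  # placeholder project
        )

        # The pager yields ErrorGroupStats messages.
        for group_stats in client.list_group_stats(request=request):
            # Per the docstring above, these counts are inside the requested
            # time interval, non-overlapping, and ordered by ascending time.
            for timed_count in group_stats.timed_counts:
                print(timed_count.start_time, timed_count.count)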
From 74049e38bb1e6f9c27c813cefa1b098742ecabe9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 11:52:50 -0400 Subject: [PATCH 060/111] build(deps): [autoapprove] bump cryptography from 41.0.0 to 41.0.2 (#439) Source-Link: https://github.com/googleapis/synthtool/commit/d6103f4a3540ba60f633a9e25c37ec5fe7e6286d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb Co-authored-by: Owl Bot --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/build.sh | 2 +- .kokoro/docker/docs/Dockerfile | 2 +- .kokoro/populate-secrets.sh | 2 +- .kokoro/publish-docs.sh | 2 +- .kokoro/release.sh | 2 +- .kokoro/requirements.txt | 44 +++++++++++++++------------- .kokoro/test-samples-against-head.sh | 2 +- .kokoro/test-samples-impl.sh | 2 +- .kokoro/test-samples.sh | 2 +- .kokoro/trampoline.sh | 2 +- .kokoro/trampoline_v2.sh | 2 +- .pre-commit-config.yaml | 2 +- .trampolinerc | 4 +-- MANIFEST.in | 2 +- README.rst | 27 +++++++++-------- docs/conf.py | 2 +- noxfile.py | 3 +- scripts/decrypt-secrets.sh | 2 +- scripts/readme-gen/readme_gen.py | 18 ++++++------ setup.cfg | 2 +- 22 files changed, 68 insertions(+), 64 deletions(-) diff --git a/.flake8 b/.flake8 index 2e438749..87f6e408 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 98994f47..ae4a522b 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd -# created: 2023-06-28T17:03:33.371210701Z + digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb +# created: 2023-07-17T15:20:13.819193964Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 165d0e08..77234eac 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index f8137d0a..8e39a2cc 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index f5251425..6f397214 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2023 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 1c4d6237..9eafe0be 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 4a5ad17a..716547f1 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index c7929db6..67d70a11 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,26 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 
\ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via # gcp-releasetool # secretstorage diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index ba3a707b..63ac41df 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 2c6500ca..5a0f5fab 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 11c042d3..50b35a48 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index f39236e9..d85b1f26 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 4af6cdc2..59a7cf3a 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5405cc8f..9e3898fd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/.trampolinerc b/.trampolinerc index 0eee72ab..a7dfeb42 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Template for .trampolinerc - # Add required env vars here. required_envvars+=( ) diff --git a/MANIFEST.in b/MANIFEST.in index e783f4c6..e0a66705 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/README.rst b/README.rst index 46fa4e0c..83ec1724 100644 --- a/README.rst +++ b/README.rst @@ -36,21 +36,24 @@ In order to use this library, you first need to go through the following steps: Installation ~~~~~~~~~~~~ -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. +Install this library in a virtual environment using `venv`_. `venv`_ is a tool that +creates isolated Python environments. These isolated environments can have separate +versions of Python packages, which allows you to isolate one project's dependencies +from the dependencies of other projects. -With `virtualenv`_, it's possible to install this library without needing system +With `venv`_, it's possible to install this library without needing system install permissions, and without clashing with the installed system dependencies. -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +.. _`venv`: https://docs.python.org/3/library/venv.html Code samples and snippets ~~~~~~~~~~~~~~~~~~~~~~~~~ -Code samples and snippets live in the `samples/` folder. +Code samples and snippets live in the `samples/`_ folder. + +.. _samples/: https://github.com/googleapis/python-error-reporting/tree/main/samples Supported Python Versions @@ -77,10 +80,9 @@ Mac/Linux .. code-block:: console - pip install virtualenv - virtualenv + python3 -m venv source /bin/activate - /bin/pip install google-cloud-error-reporting + pip install google-cloud-error-reporting Windows @@ -88,10 +90,9 @@ Windows .. code-block:: console - pip install virtualenv - virtualenv - \Scripts\activate - \Scripts\pip.exe install google-cloud-error-reporting + py -m venv + .\\Scripts\activate + pip install google-cloud-error-reporting Next Steps ~~~~~~~~~~ diff --git a/docs/conf.py b/docs/conf.py index c8f4857a..99304e78 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/noxfile.py b/noxfile.py index 715640ff..5ff54976 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -399,6 +399,7 @@ def prerelease_deps(session): "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" ) session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 21f6d2a2..0018b421 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index 91b59676..1acc1198 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,17 +33,17 @@ autoescape=True, ) -README_TMPL = jinja_env.get_template('README.tmpl.rst') +README_TMPL = jinja_env.get_template("README.tmpl.rst") def get_help(file): - return subprocess.check_output(['python', file, '--help']).decode() + return subprocess.check_output(["python", file, "--help"]).decode() def main(): parser = argparse.ArgumentParser() - parser.add_argument('source') - parser.add_argument('--destination', default='README.rst') + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") args = parser.parse_args() @@ -51,9 +51,9 @@ def main(): root = os.path.dirname(source) destination = os.path.join(root, args.destination) - jinja_env.globals['get_help'] = get_help + jinja_env.globals["get_help"] = get_help - with io.open(source, 'r') as f: + with io.open(source, "r") as f: config = yaml.load(f) # This allows get_help to execute in the right directory. @@ -61,9 +61,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, 'w') as f: + with io.open(destination, "w") as f: f.write(output) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/setup.cfg b/setup.cfg index c3a2b39f..05235008 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 5f2656fac9623fb555efa6716658c30fc3a3b122 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jul 2023 10:17:53 -0400 Subject: [PATCH 061/111] build(deps): [autoapprove] bump pygments from 2.13.0 to 2.15.0 (#441) Source-Link: https://github.com/googleapis/synthtool/commit/eaef28efd179e6eeb9f4e9bf697530d074a6f3b9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ae4a522b..17c21d96 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb -# created: 2023-07-17T15:20:13.819193964Z + digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e +# created: 2023-07-21T02:12:46.49799314Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 67d70a11..b563eb28 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -396,9 +396,9 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 +pygments==2.15.0 \ + --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ + --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 # via # readme-renderer # rich From da46a2fcc21a3f2f4873cda8190ced57aed3936e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 27 Jul 2023 05:48:01 -0400 Subject: [PATCH 062/111] build(deps): [autoapprove] bump certifi from 2022.12.7 to 2023.7.22 (#443) Source-Link: https://github.com/googleapis/synthtool/commit/395d53adeeacfca00b73abf197f65f3c17c8f1e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 17c21d96..0ddd0e4d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e -# created: 2023-07-21T02:12:46.49799314Z + digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 +# created: 2023-07-25T21:01:10.396410762Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index b563eb28..76d9bba0 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ From 94f9da6f658c9da121ab917a7f79789b8fc6e745 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:17:57 -0400 Subject: [PATCH 063/111] chore: [autoapprove] Pin flake8 version (#444) Source-Link: https://github.com/googleapis/synthtool/commit/0ddbff8012e47cde4462fe3f9feab01fbc4cdfd6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bced5ca77c4dda0fd2f5d845d4035fc3c5d3d6b81f245246a36aee114970082b Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .pre-commit-config.yaml | 2 +- noxfile.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 0ddd0e4d..d71329cc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 -# created: 2023-07-25T21:01:10.396410762Z + digest: sha256:bced5ca77c4dda0fd2f5d845d4035fc3c5d3d6b81f245246a36aee114970082b +# created: 2023-08-01T17:41:45.434027321Z diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9e3898fd..19409cbd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 3.9.2 + rev: 6.1.0 hooks: - id: flake8 diff --git a/noxfile.py b/noxfile.py index 5ff54976..2b3b1bfa 100644 --- a/noxfile.py +++ b/noxfile.py @@ -25,6 +25,7 @@ import nox +FLAKE8_VERSION = "flake8==6.1.0" BLACK_VERSION = "black==22.3.0" ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -81,7 +82,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. 
""" - session.install("flake8", BLACK_VERSION) + session.install(FLAKE8_VERSION, BLACK_VERSION) session.run( "black", "--check", From f1dd046fd4e318fca3a07ad27e4d765de8d75fa5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Aug 2023 10:39:02 -0400 Subject: [PATCH 064/111] build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 (#446) Source-Link: https://github.com/googleapis/synthtool/commit/352b9d4c068ce7c05908172af128b294073bf53c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 48 +++++++++++++++++++-------------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d71329cc..a3da1b0d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bced5ca77c4dda0fd2f5d845d4035fc3c5d3d6b81f245246a36aee114970082b -# created: 2023-08-01T17:41:45.434027321Z + digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 +# created: 2023-08-02T10:53:29.114535628Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 76d9bba0..029bd342 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.2 \ - --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ - --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ - --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ - --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ - --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ - --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ - --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ - --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ - --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ - --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ - --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ - --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ - --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ - --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ - --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ - --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ - --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ - --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ - --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ - --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ - --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ - 
--hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ - --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 +cryptography==41.0.3 \ + --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ + --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ + --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ + --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ + --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ + --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ + --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ + --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ + --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ + --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ + --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ + --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ + --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ + --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ + --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ + --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ + --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ + --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ + --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ + --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ + --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ + --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ + --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de # via # gcp-releasetool # secretstorage From 48823d4529fc2a2ac7b6c3f745c3ea5cb0ec9d38 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 13 Sep 2023 14:04:44 -0700 Subject: [PATCH 065/111] docs: Minor formatting (#448) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Minor formatting chore: Update gapic-generator-python to v1.11.5 build: Update rules_python to 0.24.0 PiperOrigin-RevId: 563436317 Source-Link: https://github.com/googleapis/googleapis/commit/42fd37b18d706f6f51f52f209973b3b2c28f509a Source-Link: https://github.com/googleapis/googleapis-gen/commit/280264ca02fb9316b4237a96d0af1a2343a81a56 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjgwMjY0Y2EwMmZiOTMxNmI0MjM3YTk2ZDBhZjFhMjM0M2E4MWE1NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/errorreporting_v1beta1/types/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index 947b89a3..6342410b 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -189,8 +189,8 @@ class ServiceContext(proto.Message): resource_type (str): Type of the 
MonitoredResource. List of possible values: - https://cloud.google.com/monitoring/api/resources + Value is set automatically for incoming errors and must not be set when reporting errors. """ From 2ddb45fd18086364503809ca52ecc8d63860ed55 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Oct 2023 21:51:02 -0400 Subject: [PATCH 066/111] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#451) Source-Link: https://github.com/googleapis/synthtool/commit/dede53ff326079b457cfb1aae5bbdc82cbb51dc3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .gitignore | 1 + .kokoro/requirements.txt | 49 ++++++++++++++++++++------------------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a3da1b0d..a9bdb1b7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/.gitignore b/.gitignore index b4243ced..d083ea1d 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 029bd342..96d593c8 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - 
--hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 4401235cb460392b890d559cb9e0eaf550416a0a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 11:04:31 -0400 Subject: [PATCH 067/111] chore: [autoapprove] Update `black` and `isort` to latest versions (#453) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 6 +++--- .pre-commit-config.yaml | 2 +- noxfile.py | 36 +++++++++++++++++++----------------- tests/unit/test__gapic.py | 1 - tests/unit/test__logging.py | 1 - tests/unit/test_client.py | 1 - 7 files changed, 25 insertions(+), 26 deletions(-) diff --git 
a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index a9bdb1b7..dd98abbd 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 96d593c8..0332d326 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 19409cbd..6a8e1695 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/noxfile.py b/noxfile.py index 2b3b1bfa..54abbaab 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,22 +17,24 @@ # Generated by synthtool. DO NOT EDIT! 
from __future__ import absolute_import + import os import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -40,23 +42,23 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} - -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -69,6 +71,7 @@ "lint_setup_py", "blacken", "docs", + "format", ] # Error if a python version is missing @@ -187,7 +190,6 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. 
# See https://github.com/grpc/grpc/issues/32163 diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index 365eee27..52e525e3 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -49,7 +49,6 @@ def test_make_report_error_api(self): class Test_ErrorReportingGapicApi(unittest.TestCase): - PROJECT = "PROJECT" def _make_one(self, gapic_api, project): diff --git a/tests/unit/test__logging.py b/tests/unit/test__logging.py index c5b1cc32..433c9192 100644 --- a/tests/unit/test__logging.py +++ b/tests/unit/test__logging.py @@ -24,7 +24,6 @@ def _make_credentials(): class Test_ErrorReportingLoggingAPI(unittest.TestCase): - PROJECT = "PROJECT" def _make_one(self, project, credentials, **kw): diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 3a7290e8..e53b1545 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -25,7 +25,6 @@ def _make_credentials(): class TestClient(unittest.TestCase): - PROJECT = "PROJECT" SERVICE = "SERVICE" VERSION = "myversion" From f1f7fad81c2fb181a79fee6ee7c99170f32d968e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 07:25:04 -0400 Subject: [PATCH 068/111] chore(deps): bump urllib3 from 1.26.17 to 1.26.18 in /.kokoro (#456) Source-Link: https://github.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index dd98abbd..7f291dbd 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 -# created: 2023-10-09T14:06:13.397766266Z + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 +# created: 2023-10-18T20:26:37.410353675Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 0332d326..16170d0c 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # requests # twine From 48eb4c98908ae266696355584925a2d86de1bd98 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 07:34:34 -0400 Subject: [PATCH 069/111] chore: Update gapic-generator-python to v1.11.9 (#454) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.7 PiperOrigin-RevId: 573230664 Source-Link: https://github.com/googleapis/googleapis/commit/93beed334607e70709cc60e6145be65fdc8ec386 Source-Link: https://github.com/googleapis/googleapis-gen/commit/f4a4edaa8057639fcf6adf9179872280d1a8f651 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjRhNGVkYWE4MDU3NjM5ZmNmNmFkZjkxNzk4NzIyODBkMWE4ZjY1MSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.11.8 PiperOrigin-RevId: 574178735 Source-Link: https://github.com/googleapis/googleapis/commit/7307199008ee2d57a4337066de29f9cd8c444bc6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ce3af21b7c559a87c2befc076be0e3aeda3a26f0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2UzYWYyMWI3YzU1OWE4N2MyYmVmYzA3NmJlMGUzYWVkYTNhMjZmMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.11.9 PiperOrigin-RevId: 574520922 Source-Link: https://github.com/googleapis/googleapis/commit/5183984d611beb41e90f65f08609b9d926f779bd Source-Link: https://github.com/googleapis/googleapis-gen/commit/a59af19d4ac6509faedf1cc39029141b6a5b8968 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTU5YWYxOWQ0YWM2NTA5ZmFlZGYxY2MzOTAyOTE0MWI2YTViODk2OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove obsolete rst files --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../{services.rst => services_.rst} | 0 .../{types.rst => types_.rst} | 0 docs/index.rst | 4 +- .../test_error_group_service.py | 103 +++++++++++++--- .../test_error_stats_service.py | 47 ++++---- .../test_report_errors_service.py | 111 ++++++++++++------ 6 files changed, 190 insertions(+), 75 deletions(-) rename docs/errorreporting_v1beta1/{services.rst => services_.rst} (100%) rename 
docs/errorreporting_v1beta1/{types.rst => types_.rst} (100%) diff --git a/docs/errorreporting_v1beta1/services.rst b/docs/errorreporting_v1beta1/services_.rst similarity index 100% rename from docs/errorreporting_v1beta1/services.rst rename to docs/errorreporting_v1beta1/services_.rst diff --git a/docs/errorreporting_v1beta1/types.rst b/docs/errorreporting_v1beta1/types_.rst similarity index 100% rename from docs/errorreporting_v1beta1/types.rst rename to docs/errorreporting_v1beta1/types_.rst diff --git a/docs/index.rst b/docs/index.rst index 949b1c9d..cc149749 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,8 +19,8 @@ API Reference client util - errorreporting_v1beta1/services - errorreporting_v1beta1/types + errorreporting_v1beta1/services_ + errorreporting_v1beta1/types_ Changelog diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 8f4bc53d..ea30d474 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -1239,8 +1239,9 @@ def test_get_group_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1317,8 +1318,9 @@ def test_get_group_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1441,8 +1443,9 @@ def test_get_group_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1500,6 +1503,73 @@ def test_update_group_rest(request_type): "tracking_issues": [{"url": "url_value"}], "resolution_status": 1, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = error_group_service.UpdateGroupRequest.meta.fields["group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["group"][field])): + del request_init["group"][field][i][subfield] + else: + del request_init["group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -1514,8 +1584,9 @@ def test_update_group_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1588,8 +1659,9 @@ def test_update_group_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1675,12 +1747,6 @@ def test_update_group_rest_bad_request( # send a request that will satisfy transcoding request_init = {"group": {"name": "projects/sample1/groups/sample2"}} - request_init["group"] = { - "name": "projects/sample1/groups/sample2", - "group_id": "group_id_value", - "tracking_issues": [{"url": "url_value"}], - "resolution_status": 1, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -1718,8 +1784,9 @@ def test_update_group_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index d2e77d24..29e94f34 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1874,8 +1874,9 @@ def test_list_group_stats_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1964,10 +1965,9 @@ def test_list_group_stats_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.ListGroupStatsResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2110,8 +2110,9 @@ 
def test_list_group_stats_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2235,8 +2236,9 @@ def test_list_events_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.ListEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2328,8 +2330,9 @@ def test_list_events_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.ListEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2477,8 +2480,9 @@ def test_list_events_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.ListEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2598,8 +2602,9 @@ def test_delete_events_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.DeleteEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.DeleteEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2673,8 +2678,9 @@ def test_delete_events_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = error_stats_service.DeleteEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.DeleteEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -2799,8 +2805,9 @@ def test_delete_events_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() 
response_value.status_code = 200 - pb_return_value = error_stats_service.DeleteEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = error_stats_service.DeleteEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 9824fea4..1f84c2c1 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -1060,6 +1060,73 @@ def test_report_error_event_rest(request_type): }, }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = report_errors_service.ReportErrorEventRequest.meta.fields["event"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["event"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["event"][field])): + del request_init["event"][field][i][subfield] + else: + del request_init["event"][field][subfield] request = request_type(**request_init) # Mock the http 
request call within the method and fake a response. @@ -1070,10 +1137,9 @@ def test_report_error_event_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = report_errors_service.ReportErrorEventResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = report_errors_service.ReportErrorEventResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1148,10 +1214,11 @@ def test_report_error_event_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = report_errors_service.ReportErrorEventResponse.pb( + # Convert return value to protobuf type + return_value = report_errors_service.ReportErrorEventResponse.pb( return_value ) - json_return_value = json_format.MessageToJson(pb_return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -1249,31 +1316,6 @@ def test_report_error_event_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project_name": "projects/sample1"} - request_init["event"] = { - "event_time": {"seconds": 751, "nanos": 543}, - "service_context": { - "service": "service_value", - "version": "version_value", - "resource_type": "resource_type_value", - }, - "message": "message_value", - "context": { - "http_request": { - "method": "method_value", - "url": "url_value", - "user_agent": "user_agent_value", - "referrer": "referrer_value", - "response_status_code": 2156, - "remote_ip": "remote_ip_value", - }, - "user": "user_value", - "report_location": { - "file_path": "file_path_value", - "line_number": 1168, - "function_name": "function_name_value", - }, - }, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
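Each of these hunks replaces the intermediate ``pb_return_value`` variable with an in-place conversion: the proto-plus return value is first unwrapped to its underlying protobuf message with ``.pb()`` and then serialized with ``json_format.MessageToJson``, so the mocked ``requests`` response carries the JSON body the REST transport will parse. A small sketch of that round trip, assuming the library and ``protobuf`` are installed (the field values are illustrative)::

    from google.cloud.errorreporting_v1beta1.types import common
    from google.protobuf import json_format

    # Build a proto-plus message, unwrap it to the raw protobuf message,
    # then serialize it to JSON -- the payload the mocked REST response returns.
    group = common.ErrorGroup(
        name="projects/sample1/groups/sample2",
        group_id="group_id_value",
    )
    json_body = json_format.MessageToJson(common.ErrorGroup.pb(group))
    print(json_body)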
@@ -1314,10 +1356,9 @@ def test_report_error_event_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = report_errors_service.ReportErrorEventResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = report_errors_service.ReportErrorEventResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value From 8328123b07e23df2d38b21cb52310e4debdfbdc3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 2 Nov 2023 21:16:14 -0400 Subject: [PATCH 070/111] chore: update docfx minimum Python version (#457) Source-Link: https://github.com/googleapis/synthtool/commit/bc07fd415c39853b382bcf8315f8eeacdf334055 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/docs.yml | 2 +- noxfile.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7f291dbd..ec696b55 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 +# created: 2023-11-03T00:57:07.335914631Z diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index e97d89e4..221806ce 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/noxfile.py b/noxfile.py index 54abbaab..91389c37 100644 --- a/noxfile.py +++ b/noxfile.py @@ -301,7 +301,7 @@ def docs(session): ) -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" From 5fcc703fdc5fbef270dbc5ae72504e0f687412e5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 02:28:46 -0500 Subject: [PATCH 071/111] chore: bump urllib3 from 1.26.12 to 1.26.18 (#458) Source-Link: https://github.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/requirements.txt | 532 ++++++++++++++++++++------------------ 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ec696b55..453b540c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 -# created: 2023-11-03T00:57:07.335914631Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 16170d0c..8957e211 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - 
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - 
--hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + 
--hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - 
--hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 # via # gcp-releasetool # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e +distlib==0.3.7 \ + --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ + --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + 
--hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 +gcp-releasetool==1.16.0 \ + --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ + --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via # google-cloud-core # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 +google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a +google-cloud-core==2.3.3 \ + --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ + --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 # via google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 +google-cloud-storage==2.13.0 \ + --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ + --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -251,29 +231,31 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.6.0 \ + --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ + --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c +googleapis-common-protos==1.61.0 \ + --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core 
idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==5.0.0 \ - --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 +importlib-metadata==6.8.0 \ + --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ + --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 # via # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -285,75 +267,121 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==24.2.0 \ + --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ + --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 # via # gcp-releasetool # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - 
--hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + 
--hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + 
--hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +more-itertools==10.1.0 \ + --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ + --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 +nh3==0.2.14 \ + --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ + --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ + --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ + --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ + --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ + --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ + --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ + --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ + --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ + --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ + --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ + --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ + --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ + --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ + --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ + --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # gcp-releasetool # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -383,34 +411,30 @@ protobuf==3.20.3 \ # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - 
--hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 +pygments==2.16.1 \ + --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ + --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 # via # readme-renderer # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging pyperclip==1.8.2 \ --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool @@ -418,9 +442,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ @@ -431,17 +455,17 @@ requests==2.31.0 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.6.0 \ - --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 
+rich==13.6.0 \ + --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ + --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -455,43 +479,37 @@ six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # bleach # gcp-docuploader - # google-auth # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==1.26.18 \ - --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ - --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # requests # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 +virtualenv==20.24.6 \ + --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ + --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach -wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.41.3 \ + --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ + --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f +setuptools==68.2.2 \ + --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r 
requirements.in From 36c1b598b35561e56815ce729884410134c3357d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 15:22:09 -0500 Subject: [PATCH 072/111] feat: Add support for Python 3.12 (#459) * chore(python): Add Python 3.12 Source-Link: https://github.com/googleapis/synthtool/commit/af16e6d4672cc7b400f144de2fc3068b54ff47d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 * Add python3.12 to setup.py --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 +- .github/workflows/unittest.yml | 2 +- .kokoro/samples/python3.12/common.cfg | 59 ++++++++++++++++++++ .kokoro/samples/python3.12/continuous.cfg | 6 ++ .kokoro/samples/python3.12/periodic-head.cfg | 11 ++++ .kokoro/samples/python3.12/periodic.cfg | 6 ++ .kokoro/samples/python3.12/presubmit.cfg | 6 ++ CONTRIBUTING.rst | 6 +- noxfile.py | 2 +- setup.py | 1 + 10 files changed, 97 insertions(+), 6 deletions(-) create mode 100644 .kokoro/samples/python3.12/common.cfg create mode 100644 .kokoro/samples/python3.12/continuous.cfg create mode 100644 .kokoro/samples/python3.12/periodic-head.cfg create mode 100644 .kokoro/samples/python3.12/periodic.cfg create mode 100644 .kokoro/samples/python3.12/presubmit.cfg diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 453b540c..eb4d9f79 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 -# created: 2023-11-08T19:46:45.022803742Z + digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 +# created: 2023-11-23T18:17:28.105124211Z diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 8057a769..a32027b4 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 00000000..67d66075 --- /dev/null +++ b/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,59 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-error-reporting/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 00000000..0ab001ca --- /dev/null +++ b/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-error-reporting/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 00000000..71cd1e59 --- /dev/null +++ b/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f4f9bf83..5fc13e52 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.12 -- -k .. note:: @@ -226,12 +226,14 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/noxfile.py b/noxfile.py index 91389c37..c672b30a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -34,7 +34,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/setup.py b/setup.py index db8b4288..942db61a 100644 --- a/setup.py +++ b/setup.py @@ -82,6 +82,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], From a63e3f25ce5ef0cd0077838cdbb6ceff0f15ce31 Mon Sep 17 00:00:00 2001 From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> Date: Wed, 29 Nov 2023 16:51:48 -0500 Subject: [PATCH 073/111] feat: use native namespaces instead of pkg_resources (#463) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: use native namespaces instead of pkg_resources * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Fixed comments in test_packaging.py --------- Co-authored-by: Owl Bot --- setup.py | 7 +---- tests/unit/test_packaging.py | 57 ++++++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 6 deletions(-) create mode 100644 tests/unit/test_packaging.py diff --git a/setup.py b/setup.py index 942db61a..3f28c8d6 100644 --- a/setup.py +++ b/setup.py @@ -54,14 +54,10 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -89,7 +85,6 @@ platforms="Posix; MacOS X; Windows", packages=packages, python_requires=">=3.7", - namespace_packages=namespaces, install_requires=dependencies, include_package_data=True, zip_safe=False, diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py new file mode 100644 index 00000000..01ab6a79 --- /dev/null +++ b/tests/unit/test_packaging.py @@ -0,0 +1,57 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-error-reporting``. + + google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + + google_otherpkg = tmp_path / "google" / "otherpkg" + google_otherpkg.mkdir() + google_otherpkg.joinpath("__init__.py").write_text("") + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-error-reporting``. 
+ google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + + google_cloud_otherpkg = tmp_path / "google" / "cloud" / "otherpkg" + google_cloud_otherpkg.mkdir() + google_cloud_otherpkg.joinpath("__init__.py").write_text("") + + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + + for pkg in [ + "google.othermod", + "google.cloud.othermod", + "google.otherpkg", + "google.cloud.otherpkg", + "google.cloud.error_reporting", + "google.cloud.errorreporting_v1beta1", + ]: + cmd = [sys.executable, "-c", f"import {pkg}"] + subprocess.check_output(cmd, env=env) + + for module in ["google.othermod", "google.cloud.othermod"]: + cmd = [sys.executable, "-m", module] + subprocess.check_output(cmd, env=env) From 21bcbf2fadfd54c647a9e44867d868680068519d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 18:15:19 -0500 Subject: [PATCH 074/111] chore: bump cryptography from 41.0.5 to 41.0.6 in /synthtool/gcp/templates/python_library/.kokoro (#461) Source-Link: https://github.com/googleapis/synthtool/commit/9367caadcbb30b5b2719f30eb00c44cc913550ed Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/requirements.txt | 48 +++++++++++++++++++-------------------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index eb4d9f79..773c1dfd 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 -# created: 2023-11-23T18:17:28.105124211Z + digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c +# created: 2023-11-29T14:54:29.548172703Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 8957e211..e5c1ffca 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,30 +93,30 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - 
--hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 +cryptography==41.0.6 \ + --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage From 44c2b146aec92e272134ebaa6945fe78f98753bd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 10:08:54 -0500 Subject: [PATCH 075/111] fix: use `retry_async` instead of `retry` in async client (#462) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.12.0 PiperOrigin-RevId: 586356061 Source-Link: https://github.com/googleapis/googleapis/commit/72a1f55abaedbb62decd8ae8a44a4de223799c76 Source-Link: https://github.com/googleapis/googleapis-gen/commit/558a04bcd1cc0576e8fac1089e48e48b27ac161b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU4YTA0YmNkMWNjMDU3NmU4ZmFjMTA4OWU0OGU0OGIyN2FjMTYxYiJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update required checks * feat: added Generator API docs: updated doc for speech mode PiperOrigin-RevId: 586469693 Source-Link: https://github.com/googleapis/googleapis/commit/e8148d6d4bb02c907e06a784848ef731acb9e258 Source-Link: https://github.com/googleapis/googleapis-gen/commit/85136bd04383ed7172bb18b7b8d220dd7ff6b3a0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODUxMzZiZDA0MzgzZWQ3MTcyYmIxOGI3YjhkMjIwZGQ3ZmY2YjNhMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/sync-repo-settings.yaml | 14 ++++++++++++++ .../services/error_group_service/async_client.py | 10 +++++----- .../services/error_stats_service/async_client.py | 12 ++++++------ .../services/report_errors_service/async_client.py | 8 ++++---- 4 files changed, 29 insertions(+), 15 deletions(-) diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 37438d33..1ca6d927 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -12,3 +12,17 @@ branchProtectionRules: - 'Samples - Lint' - 'Samples - Python 3.7' - 'Samples - Python 3.8' + - 'Samples - Python 3.9' + - 'Samples - Python 3.10' + - 'Samples - Python 3.11' + - 'Samples - Python 3.12' + - 'docs' + - 'docfx' + - 'lint' + - 'unit (3.7)' + - 'unit (3.8)' + - 'unit (3.9)' + - 'unit (3.10)' + - 'unit (3.11)' + - 'unit (3.12)' + - 'cover' diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index 0d91c2fd..45a13d3d 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_group_service @@ -267,7 +267,7 @@ async def sample_get_group(): This corresponds to the ``group_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -371,7 +371,7 @@ async def sample_update_group(): This corresponds to the ``group`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index bb758420..db0bc3e1 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.cloud.errorreporting_v1beta1.services.error_stats_service import pagers from google.cloud.errorreporting_v1beta1.types import common @@ -288,7 +288,7 @@ async def sample_list_group_stats(): This corresponds to the ``time_range`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -420,7 +420,7 @@ async def sample_list_events(): This corresponds to the ``group_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -541,7 +541,7 @@ async def sample_delete_events(): This corresponds to the ``project_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 929bac2c..40d3deb6 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.cloud.errorreporting_v1beta1.types import report_errors_service from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO @@ -289,7 +289,7 @@ async def sample_report_error_event(): This corresponds to the ``event`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be From 83cf8a55f77af0c652b4d6b5fc33fd74ddc9440a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 30 Nov 2023 13:05:59 -0500 Subject: [PATCH 076/111] build: treat warnings as errors (#464) * build: treat warnings as errors * silence warning from googleapis-common-protos --- pytest.ini | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..44aa3708 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,13 @@ +[pytest] +filterwarnings = + # treat all warnings as errors + error + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning + # Remove once https://github.com/grpc/grpc/issues/35086 is fixed + ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel + # Remove once release PR https://github.com/googleapis/proto-plus-python/pull/391 is merged + ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers + # Remove once https://github.com/googleapis/python-api-common-protos/pull/187/files is merged + ignore:.*pkg_resources.declare_namespace:DeprecationWarning + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning From 74955f92feb0cb647bbfb0fcb4b7588b402820ea Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 10 Dec 2023 08:59:55 -0500 Subject: [PATCH 077/111] build: update actions/checkout and actions/setup-python (#467) Source-Link: https://github.com/googleapis/synthtool/commit/3551acd1261fd8f616cbfd054cda9bd6d6ac75f4 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/docs.yml | 8 ++++---- .github/workflows/lint.yml | 4 ++-- .github/workflows/unittest.yml | 8 ++++---- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 773c1dfd..40bf9973 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c -# created: 2023-11-29T14:54:29.548172703Z + digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 +# created: 2023-12-09T15:16:25.430769578Z diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 221806ce..698fbc5c 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.9" - name: Install nox @@ -24,9 +24,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install nox diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 16d5a9e9..4866193a 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install nox diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index a32027b4..d6ca6562 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -11,9 +11,9 @@ jobs: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox @@ -37,9 +37,9 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install coverage From 986158937dc77abe6a96b03528f7ded6c82f8938 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 14:23:11 -0500 Subject: [PATCH 078/111] chore(main): release 1.10.0 (#449) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 18 ++++++++++++++++++ google/cloud/error_reporting/gapic_version.py | 2 +- .../errorreporting_v1beta1/gapic_version.py | 2 +- ...e.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 22 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 50fc4585..ce4f35c2 100644 --- a/.release-please-manifest.json +++ 
b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.9.2" + ".": "1.10.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 84fe7f1d..e68ae0e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.10.0](https://github.com/googleapis/python-error-reporting/compare/v1.9.2...v1.10.0) (2023-12-10) + + +### Features + +* Add support for Python 3.12 ([#459](https://github.com/googleapis/python-error-reporting/issues/459)) ([36c1b59](https://github.com/googleapis/python-error-reporting/commit/36c1b598b35561e56815ce729884410134c3357d)) +* Use native namespaces instead of pkg_resources ([#463](https://github.com/googleapis/python-error-reporting/issues/463)) ([a63e3f2](https://github.com/googleapis/python-error-reporting/commit/a63e3f25ce5ef0cd0077838cdbb6ceff0f15ce31)) + + +### Bug Fixes + +* Use `retry_async` instead of `retry` in async client ([#462](https://github.com/googleapis/python-error-reporting/issues/462)) ([44c2b14](https://github.com/googleapis/python-error-reporting/commit/44c2b146aec92e272134ebaa6945fe78f98753bd)) + + +### Documentation + +* Minor formatting ([#448](https://github.com/googleapis/python-error-reporting/issues/448)) ([48823d4](https://github.com/googleapis/python-error-reporting/commit/48823d4529fc2a2ac7b6c3f745c3ea5cb0ec9d38)) + ## [1.9.2](https://github.com/googleapis/python-error-reporting/compare/v1.9.1...v1.9.2) (2023-07-04) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index ab4e0f59..7ce19170 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.9.2" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index ab4e0f59..7ce19170 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.9.2" # {x-release-please-version} +__version__ = "1.10.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..030caa5c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.10.0" }, "snippets": [ { From dd9114a59352eae43b396c84d42ce7e5b4214807 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:20:40 -0500 Subject: [PATCH 079/111] build: update actions/upload-artifact and actions/download-artifact (#468) Source-Link: https://github.com/googleapis/synthtool/commit/280ddaed417057dfe5b1395731de07b7d09f5058 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/unittest.yml | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 40bf9973..9bee2409 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 -# created: 2023-12-09T15:16:25.430769578Z + digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 +# created: 2023-12-14T22:17:57.611773021Z diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index d6ca6562..f4a337c4 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -26,9 +26,9 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage-artifacts + name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} cover: @@ -47,11 +47,11 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: coverage-artifacts path: .coverage-results/ - name: Report coverage results run: | - coverage combine .coverage-results/.coverage* + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* coverage report --show-missing --fail-under=100 From 7385c7f635bdf92df0882c9e349e868547e5e32d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 Jan 2024 10:42:46 -0500 Subject: [PATCH 080/111] build(python): fix `docs` and `docfx` builds (#470) Source-Link: https://github.com/googleapis/synthtool/commit/fac8444edd5f5526e804c306b766a271772a3e2f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 6 +++--- .kokoro/requirements.txt | 6 +++--- noxfile.py | 20 
+++++++++++++++++++- 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9bee2409..d8a1bbca 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 -# created: 2023-12-14T22:17:57.611773021Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index e5c1ffca..bb3d6ca3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/noxfile.py b/noxfile.py index c672b30a..5617223b 100644 --- a/noxfile.py +++ b/noxfile.py @@ -281,7 +281,16 @@ def docs(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "recommonmark", ) @@ -307,6 +316,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", From 87a978934dfea0bf44e1bae02e429f3ac89de74b Mon Sep 17 00:00:00 2001 From: Cindy Peng <148148319+cindy-peng@users.noreply.github.com> Date: Mon, 29 Jan 2024 12:35:54 -0800 Subject: [PATCH 081/111] chore: create flakybot.yaml to change default issue priority (#474) * chore: create flakybot.yaml to change default issue priority * Add google copyright license --------- Co-authored-by: cindy-peng --- .github/flakybot.yaml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 .github/flakybot.yaml diff --git a/.github/flakybot.yaml b/.github/flakybot.yaml new file mode 100644 index 00000000..2159a1bc --- /dev/null +++ b/.github/flakybot.yaml @@ -0,0 +1,15 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +issuePriority: p2 \ No newline at end of file From d3483a92843baeee33c08589462f61b3cd4af487 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:59:27 -0500 Subject: [PATCH 082/111] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#485) * build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro Source-Link: https://github.com/googleapis/synthtool/commit/d895aec3679ad22aa120481f746bf9f2f325f26f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad * update warning filter for grpc; remove obsolete warning --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/requirements.txt | 57 ++++++++++++++++++++++----------------- pytest.ini | 6 ++--- 3 files changed, 37 insertions(+), 30 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index d8a1bbca..e4e943e0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bb3d6ca3..bda8e38c 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + 
--hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage diff --git a/pytest.ini b/pytest.ini index 44aa3708..5af70b60 100644 --- a/pytest.ini +++ b/pytest.ini @@ -4,10 +4,8 @@ filterwarnings = error # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning - # Remove once https://github.com/grpc/grpc/issues/35086 is fixed - ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel - # Remove once release PR https://github.com/googleapis/proto-plus-python/pull/391 is merged - ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers + # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed + ignore:unclosed:ResourceWarning # Remove once https://github.com/googleapis/python-api-common-protos/pull/187/files is merged ignore:.*pkg_resources.declare_namespace:DeprecationWarning ignore:.*pkg_resources is deprecated as an API:DeprecationWarning From e4c3454bd5ba9f452479b0bc956c6ef011766d14 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 13:04:51 -0400 Subject: [PATCH 083/111] feat: Allow users to explicitly configure universe domain (#475) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Allow users to explicitly configure universe domain chore: Update gapic-generator-python to v1.14.0 PiperOrigin-RevId: 603108274 Source-Link: https://github.com/googleapis/googleapis/commit/3d83e3652f689ab51c3f95f876458c6faef619bf Source-Link: https://github.com/googleapis/googleapis-gen/commit/baf5e9bbb14a768b2b4c9eae9feb78f18f1757fa Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmFmNWU5YmJiMTRhNzY4YjJiNGM5ZWFlOWZlYjc4ZjE4ZjE3NTdmYSJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix: Resolve AttributeError 'Credentials' object has no attribute 'universe_domain' fix: Add google-auth as a direct dependency fix: Add staticmethod decorator to methods added in v1.14.0 chore: Update gapic-generator-python to v1.14.1 PiperOrigin-RevId: 603728206 Source-Link: https://github.com/googleapis/googleapis/commit/9063da8b4d45339db4e2d7d92a27c6708620e694 Source-Link: https://github.com/googleapis/googleapis-gen/commit/891c67d0a855b08085eb301dabb14064ef4b2c6d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODkxYzY3ZDBhODU1YjA4MDg1ZWIzMDFkYWJiMTQwNjRlZjRiMmM2ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(diregapic): s/bazel/bazelisk/ in DIREGAPIC build GitHub action PiperOrigin-RevId: 604714585 Source-Link: https://github.com/googleapis/googleapis/commit/e4dce1324f4cb6dedb6822cb157e13cb8e0b3073 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4036f78305c5c2aab80ff91960b3a3d983ff4b03 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDAzNmY3ODMwNWM1YzJhYWI4MGZmOTE5NjBiM2EzZDk4M2ZmNGIwMyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Require `google-api-core>=1.34.1` fix: Resolve issue with missing import for certain enums in `**/types/…` PiperOrigin-RevId: 607041732 Source-Link: https://github.com/googleapis/googleapis/commit/b4532678459355676c95c00e39866776b7f40b2e Source-Link: https://github.com/googleapis/googleapis-gen/commit/cd796416f0f54cb22b2c44fb2d486960e693a346 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2Q3OTY0MTZmMGY1NGNiMjJiMmM0NGZiMmQ0ODY5NjBlNjkzYTM0NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): Exclude google-auth 2.24.0 and 2.25.0 chore: Update gapic-generator-python to v1.14.4 PiperOrigin-RevId: 611561820 Source-Link: https://github.com/googleapis/googleapis/commit/87ef1fe57feede1f23b523f3c7fc4c3f2b92d6d2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/197316137594aafad94dea31226528fbcc39310c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTk3MzE2MTM3NTk0YWFmYWQ5NGRlYTMxMjI2NTI4ZmJjYzM5MzEwYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add include_recaptcha_script for as a new action in firewall policies PiperOrigin-RevId: 612851792 Source-Link: https://github.com/googleapis/googleapis/commit/49ea2c0fc42dd48996b833f05a258ad7e8590d3d Source-Link: https://github.com/googleapis/googleapis-gen/commit/460fdcbbbe00f35b1c591b1f3ef0c77ebd3ce277 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDYwZmRjYmJiZTAwZjM1YjFjNTkxYjFmM2VmMGM3N2ViZDNjZTI3NyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * filter warning in generated tests * fix(deps): require google-auth >= 2.14.1 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../cloud/errorreporting_v1beta1/__init__.py | 2 +- .../services/__init__.py | 2 +- .../services/error_group_service/__init__.py | 2 +- .../error_group_service/async_client.py | 74 ++- .../services/error_group_service/client.py | 298 +++++++++-- .../transports/__init__.py | 2 +- 
.../error_group_service/transports/base.py | 8 +- .../error_group_service/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../error_group_service/transports/rest.py | 14 +- .../services/error_stats_service/__init__.py | 2 +- .../error_stats_service/async_client.py | 77 ++- .../services/error_stats_service/client.py | 301 +++++++++-- .../services/error_stats_service/pagers.py | 2 +- .../transports/__init__.py | 2 +- .../error_stats_service/transports/base.py | 8 +- .../error_stats_service/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../error_stats_service/transports/rest.py | 11 +- .../report_errors_service/__init__.py | 2 +- .../report_errors_service/async_client.py | 71 ++- .../services/report_errors_service/client.py | 295 ++++++++-- .../transports/__init__.py | 2 +- .../report_errors_service/transports/base.py | 8 +- .../report_errors_service/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../report_errors_service/transports/rest.py | 13 +- .../errorreporting_v1beta1/types/__init__.py | 2 +- .../errorreporting_v1beta1/types/common.py | 2 +- .../types/error_group_service.py | 2 +- .../types/error_stats_service.py | 2 +- .../types/report_errors_service.py | 2 +- pytest.ini | 2 + ...ted_error_group_service_get_group_async.py | 2 +- ...ated_error_group_service_get_group_sync.py | 2 +- ..._error_group_service_update_group_async.py | 2 +- ...d_error_group_service_update_group_sync.py | 2 +- ...error_stats_service_delete_events_async.py | 2 +- ..._error_stats_service_delete_events_sync.py | 2 +- ...d_error_stats_service_list_events_async.py | 2 +- ...ed_error_stats_service_list_events_sync.py | 2 +- ...or_stats_service_list_group_stats_async.py | 2 +- ...ror_stats_service_list_group_stats_sync.py | 2 +- ...errors_service_report_error_event_async.py | 2 +- ..._errors_service_report_error_event_sync.py | 2 +- ....devtools.clouderrorreporting.v1beta1.json | 2 +- .../fixup_errorreporting_v1beta1_keywords.py | 2 +- setup.py | 3 + testing/constraints-3.7.txt | 1 + testing/constraints-3.8.txt | 2 +- tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/gapic/__init__.py | 2 +- .../gapic/errorreporting_v1beta1/__init__.py | 2 +- .../test_error_group_service.py | 481 +++++++++++++++-- .../test_error_stats_service.py | 503 ++++++++++++++++-- .../test_report_errors_service.py | 489 ++++++++++++++++- 57 files changed, 2418 insertions(+), 329 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/__init__.py b/google/cloud/errorreporting_v1beta1/__init__.py index e143641a..f2c069f9 100644 --- a/google/cloud/errorreporting_v1beta1/__init__.py +++ b/google/cloud/errorreporting_v1beta1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/__init__.py b/google/cloud/errorreporting_v1beta1/services/__init__.py index 89a37dc9..8f6cf068 100644 --- a/google/cloud/errorreporting_v1beta1/services/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py index 96ac56f8..7adebfc3 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index 45a13d3d..c65c9da4 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_group_service @@ -54,8 +54,12 @@ class ErrorGroupServiceAsyncClient: _client: ErrorGroupServiceClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = ErrorGroupServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ErrorGroupServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ErrorGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ErrorGroupServiceClient._DEFAULT_UNIVERSE error_group_path = staticmethod(ErrorGroupServiceClient.error_group_path) parse_error_group_path = staticmethod( @@ -164,6 +168,25 @@ def transport(self) -> ErrorGroupServiceTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(ErrorGroupServiceClient).get_transport_class, type(ErrorGroupServiceClient) ) @@ -176,7 +199,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the error group service client. + """Instantiates the error group service async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -187,23 +210,38 @@ def __init__( transport (Union[str, ~.ErrorGroupServiceTransport]): The transport to use. 
If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -312,6 +350,9 @@ async def sample_get_group(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -416,6 +457,9 @@ async def sample_update_group(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index 34b0119b..5176c9c9 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
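As a hedged aside on the hunks above: the new ``client_options`` items and the ``api_endpoint`` / ``universe_domain`` properties added to the async and sync clients can be exercised roughly as follows. This is an illustrative sketch only, assuming a google-api-core release that accepts the ``universe_domain`` option and using anonymous credentials so no network or ADC setup is needed; it is not part of the generated patch.

    # Sketch: construct a client with the new universe_domain option and read the
    # new read-only properties. AnonymousCredentials and the explicit default
    # universe value are assumptions for offline illustration.
    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.errorreporting_v1beta1.services.error_group_service import (
        ErrorGroupServiceClient,
    )

    client = ErrorGroupServiceClient(
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(universe_domain="googleapis.com"),
    )
    print(client.universe_domain)  # "googleapis.com"
    print(client.api_endpoint)     # "clouderrorreporting.googleapis.com"
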
@@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.errorreporting_v1beta1 import gapic_version as package_version @@ -42,9 +43,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_group_service @@ -124,11 +125,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "clouderrorreporting.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "clouderrorreporting.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -274,7 +279,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -304,6 +309,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -337,6 +347,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ErrorGroupServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ErrorGroupServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ErrorGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ErrorGroupServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ErrorGroupServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ErrorGroupServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -356,22 +538,32 @@ def __init__( transport (Union[str, ErrorGroupServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -382,17 +574,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ErrorGroupServiceClient._read_environment_variables() + self._client_cert_source = ErrorGroupServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ErrorGroupServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False - api_key_value = getattr(client_options, "api_key", None) + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -401,20 +610,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
- if isinstance(transport, ErrorGroupServiceTransport): + transport_provided = isinstance(transport, ErrorGroupServiceTransport) + if transport_provided: # transport is a ErrorGroupServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(ErrorGroupServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ErrorGroupServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -424,17 +646,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def get_group( @@ -534,6 +756,9 @@ def sample_get_group(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -638,6 +863,9 @@ def sample_update_group(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py index 91a3752f..ee3446ba 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py index 3cbc6416..23026c11 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
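The endpoint-resolution helpers introduced in the client hunks above (``_read_environment_variables``, ``_get_api_endpoint``, ``_get_universe_domain``) can be sketched in isolation like this. They are private static methods, so calling them directly is for illustration only; the endpoint values shown follow directly from the code added above.

    # Sketch: resolution order implemented by the new private helpers.
    from google.cloud.errorreporting_v1beta1.services.error_group_service import (
        ErrorGroupServiceClient,
    )

    # No override, "auto" mTLS, no client cert -> endpoint templated on the universe.
    assert ErrorGroupServiceClient._get_api_endpoint(
        None, None, "googleapis.com", "auto"
    ) == "clouderrorreporting.googleapis.com"

    # An explicit api_endpoint override always wins, even when mTLS is forced.
    assert ErrorGroupServiceClient._get_api_endpoint(
        "private.example.com", None, "googleapis.com", "always"
    ) == "private.example.com"

    # Universe domain: client option takes precedence over the environment variable,
    # and the default is "googleapis.com".
    assert ErrorGroupServiceClient._get_universe_domain("example.com", "other.com") == "example.com"
    assert ErrorGroupServiceClient._get_universe_domain(None, "other.com") == "other.com"
    assert ErrorGroupServiceClient._get_universe_domain(None, None) == "googleapis.com"
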
@@ -58,7 +58,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -121,6 +121,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index d97d8ad8..3f1e94b7 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -65,7 +65,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py index 0a1d829b..9c4fd649 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -110,7 +110,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py index 71c259c2..db1beb08 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
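The transport hunks above add a read-only ``host`` property to the base transport and document the default hostname; the constructor still appends ``:443`` when no port is given. A minimal sketch, again using anonymous credentials as an assumption so the snippet runs without ADC:

    # Sketch: the base transport now exposes the resolved host (default port appended).
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.errorreporting_v1beta1.services.error_group_service.transports import (
        ErrorGroupServiceGrpcTransport,
    )

    transport = ErrorGroupServiceGrpcTransport(credentials=AnonymousCredentials())
    print(transport.host)  # "clouderrorreporting.googleapis.com:443"
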
@@ -34,9 +34,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.types import common @@ -174,7 +174,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -289,7 +289,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -375,9 +374,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -386,7 +383,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py index 3eda02c7..a31a509a 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index db0bc3e1..d4d37ee3 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.services.error_stats_service import pagers from google.cloud.errorreporting_v1beta1.types import common @@ -57,8 +57,12 @@ class ErrorStatsServiceAsyncClient: _client: ErrorStatsServiceClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
DEFAULT_ENDPOINT = ErrorStatsServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ErrorStatsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ErrorStatsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ErrorStatsServiceClient._DEFAULT_UNIVERSE error_group_path = staticmethod(ErrorStatsServiceClient.error_group_path) parse_error_group_path = staticmethod( @@ -167,6 +171,25 @@ def transport(self) -> ErrorStatsServiceTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(ErrorStatsServiceClient).get_transport_class, type(ErrorStatsServiceClient) ) @@ -179,7 +202,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the error stats service client. + """Instantiates the error stats service async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -190,23 +213,38 @@ def __init__( transport (Union[str, ~.ErrorStatsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -338,6 +376,9 @@ async def sample_list_group_stats(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -470,6 +511,9 @@ async def sample_list_events(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -586,6 +630,9 @@ async def sample_delete_events(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 75722883..f0e61b21 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.errorreporting_v1beta1 import gapic_version as package_version @@ -42,9 +43,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.services.error_stats_service import pagers from google.cloud.errorreporting_v1beta1.types import common @@ -127,11 +128,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "clouderrorreporting.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "clouderrorreporting.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -277,7 +282,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -307,6 +312,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -340,6 +350,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ErrorStatsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." 
+ ) + api_endpoint = ErrorStatsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ErrorStatsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ErrorStatsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ErrorStatsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ErrorStatsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -359,22 +541,32 @@ def __init__( transport (Union[str, ErrorStatsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. 
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -385,17 +577,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ErrorStatsServiceClient._read_environment_variables() + self._client_cert_source = ErrorStatsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert ) + self._universe_domain = ErrorStatsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False - api_key_value = getattr(client_options, "api_key", None) + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -404,20 +613,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, ErrorStatsServiceTransport): + transport_provided = isinstance(transport, ErrorStatsServiceTransport) + if transport_provided: # transport is a ErrorStatsServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(ErrorStatsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ErrorStatsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -427,17 +649,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def list_group_stats( @@ -560,6 +782,9 @@ def sample_list_group_stats(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -692,6 +917,9 @@ def sample_list_events(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -808,6 +1036,9 @@ def sample_delete_events(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py index 9170d159..4c333f75 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
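The ``_validate_universe_domain`` call inserted before each RPC above compares the configured universe with the one reported by the credentials. A hedged sketch of the failure path, assuming a recent google-auth in which ``AnonymousCredentials`` report the default "googleapis.com" universe; "example.com" is a made-up universe used only to trigger the mismatch.

    # Sketch: mismatched universe domains raise ValueError before any request is sent.
    from google.api_core.client_options import ClientOptions
    from google.auth.credentials import AnonymousCredentials
    from google.cloud.errorreporting_v1beta1.services.error_stats_service import (
        ErrorStatsServiceClient,
    )

    client = ErrorStatsServiceClient(
        credentials=AnonymousCredentials(),
        client_options=ClientOptions(universe_domain="example.com"),
    )
    try:
        client._validate_universe_domain()  # also invoked internally before each RPC
    except ValueError as exc:
        print(exc)  # configured universe does not match the credentials' universe
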
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py index f8a5fe87..69c4f43c 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py index 3899648b..17f38320 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -57,7 +57,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -120,6 +120,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index 4e751ed4..55e2925c 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -65,7 +65,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py index b9b1be5f..6e284bce 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -110,7 +110,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py index b32a340e..f8a18a30 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -34,9 +34,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.types import error_stats_service @@ -209,7 +209,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -323,7 +323,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -414,7 +413,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -504,7 +502,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py index 9b8290ad..d9e40c58 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
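The REST transport hunks above drop the ``including_default_value_fields`` keyword (removed in newer protobuf releases) while keeping ``use_integers_for_enums=True``. A sketch of the serialization this corresponds to, using a hypothetical project name; the ``.pb()`` accessor is the proto-plus way to reach the underlying protobuf message.

    # Sketch: serialize a request the way the REST transport now does.
    from google.protobuf import json_format
    from google.cloud.errorreporting_v1beta1.types import error_stats_service

    request = error_stats_service.ListGroupStatsRequest(project_name="projects/my-project")
    request_pb = error_stats_service.ListGroupStatsRequest.pb(request)
    print(json_format.MessageToJson(request_pb, use_integers_for_enums=True))
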
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 40d3deb6..03eb9ce0 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.types import report_errors_service from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO @@ -53,8 +53,12 @@ class ReportErrorsServiceAsyncClient: _client: ReportErrorsServiceClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = ReportErrorsServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ReportErrorsServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ReportErrorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = ReportErrorsServiceClient._DEFAULT_UNIVERSE common_billing_account_path = staticmethod( ReportErrorsServiceClient.common_billing_account_path @@ -159,6 +163,25 @@ def transport(self) -> ReportErrorsServiceTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(ReportErrorsServiceClient).get_transport_class, type(ReportErrorsServiceClient), @@ -172,7 +195,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the report errors service client. + """Instantiates the report errors service async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -183,23 +206,38 @@ def __init__( transport (Union[str, ~.ReportErrorsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. 
The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -337,6 +375,9 @@ async def sample_report_error_event(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 60a9b15b..561dbc2a 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
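[Illustrative sketch, not part of the generated diff: the endpoint-resolution rules described in the docstring above can be observed through the api_endpoint and universe_domain properties this patch adds. Constructing a client issues no RPC, so the snippet runs offline; it assumes no client certificate is configured.]

import os
from google.auth.credentials import AnonymousCredentials
from google.cloud import errorreporting_v1beta1

# "never" keeps the plain endpoint for the default universe; "always" would switch
# the client to the mTLS endpoint instead.
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
client = errorreporting_v1beta1.ReportErrorsServiceClient(
    credentials=AnonymousCredentials()
)
print(client.api_endpoint)     # clouderrorreporting.googleapis.com
print(client.universe_domain)  # googleapis.com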
@@ -28,6 +28,7 @@ Union, cast, ) +import warnings from google.cloud.errorreporting_v1beta1 import gapic_version as package_version @@ -42,9 +43,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.types import report_errors_service from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO @@ -123,11 +124,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "clouderrorreporting.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "clouderrorreporting.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -256,7 +261,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -286,6 +291,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -319,6 +329,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ReportErrorsServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ReportErrorsServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ReportErrorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. 
+ """ + universe_domain = ReportErrorsServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = ReportErrorsServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or ReportErrorsServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -338,22 +520,32 @@ def __init__( transport (Union[str, ReportErrorsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -364,17 +556,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = ReportErrorsServiceClient._read_environment_variables() + self._client_cert_source = ReportErrorsServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = ReportErrorsServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -383,20 +592,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
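[Illustrative sketch, not part of the generated diff: a usage example for item 3 in the docstring above, assuming a google-api-core version whose ClientOptions already exposes universe_domain (the tests later in this patch check for that attribute). The universe domain only changes which host the default endpoint template is filled with; an explicit api_endpoint still takes precedence, and a mismatch with the credentials' universe is reported at RPC time rather than at construction.]

from google.api_core.client_options import ClientOptions
from google.auth.credentials import AnonymousCredentials
from google.cloud import errorreporting_v1beta1

options = ClientOptions(universe_domain="example.com")  # hypothetical universe domain
client = errorreporting_v1beta1.ReportErrorsServiceClient(
    credentials=AnonymousCredentials(), client_options=options
)
print(client.api_endpoint)     # clouderrorreporting.example.com
print(client.universe_domain)  # example.com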
- if isinstance(transport, ReportErrorsServiceTransport): + transport_provided = isinstance(transport, ReportErrorsServiceTransport) + if transport_provided: # transport is a ReportErrorsServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(ReportErrorsServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ReportErrorsServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -406,17 +628,17 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def report_error_event( @@ -545,6 +767,9 @@ def sample_report_error_event(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py index d4d23aaf..bf8256bc 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py index 54a7eda6..052ff4a4 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -57,7 +57,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. 
+ The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -120,6 +120,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index e81bad5c..5579c5b3 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -64,7 +64,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index d779e686..a3380e25 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -109,7 +109,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py index 3a959003..64aa2ae0 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
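[Illustrative sketch, not part of the generated diff: the new read-only host property added above returns the value the base transport stores after normalization, i.e. the hostname with the default HTTPS port appended when none was supplied. Constructing the transport opens no connection.]

from google.auth.credentials import AnonymousCredentials
from google.cloud.errorreporting_v1beta1.services.report_errors_service import (
    transports,
)

transport = transports.ReportErrorsServiceGrpcTransport(
    host="clouderrorreporting.googleapis.com",  # no port supplied
    credentials=AnonymousCredentials(),
)
print(transport.host)  # clouderrorreporting.googleapis.com:443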
@@ -34,9 +34,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.cloud.errorreporting_v1beta1.types import report_errors_service @@ -148,7 +148,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -263,9 +263,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -274,7 +272,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/google/cloud/errorreporting_v1beta1/types/__init__.py b/google/cloud/errorreporting_v1beta1/types/__init__.py index 9b575327..42ab63d4 100644 --- a/google/cloud/errorreporting_v1beta1/types/__init__.py +++ b/google/cloud/errorreporting_v1beta1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index 6342410b..d7517ecb 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/error_group_service.py b/google/cloud/errorreporting_v1beta1/types/error_group_service.py index daecedde..d38bb52d 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_group_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_group_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index cb0a5091..53e4a8f9 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py index 7d6b5eee..1e719f92 100644 --- a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py +++ b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/pytest.ini b/pytest.ini index 5af70b60..1f8283a2 100644 --- a/pytest.ini +++ b/pytest.ini @@ -9,3 +9,5 @@ filterwarnings = # Remove once https://github.com/googleapis/python-api-common-protos/pull/187/files is merged ignore:.*pkg_resources.declare_namespace:DeprecationWarning ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed + ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py index 702f5cda..dc84897a 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py index 074978d9..e260be1f 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py index 8b345298..62a82368 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py index 7c38b173..7239a97f 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py index a1cd90bf..4bf09138 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py index f8c08c1b..6194de23 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py index 1ec25ba7..49f25ac7 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py index 81c7cc4c..99d4ca32 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py index 60e47686..8ffdf643 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py index 16d09dc4..a696194b 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py index ba590771..5dc4e264 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py index c1fb7124..f75f5591 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index 030caa5c..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.10.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/scripts/fixup_errorreporting_v1beta1_keywords.py b/scripts/fixup_errorreporting_v1beta1_keywords.py index 813f0be6..69a5a229 100644 --- a/scripts/fixup_errorreporting_v1beta1_keywords.py +++ b/scripts/fixup_errorreporting_v1beta1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/setup.py b/setup.py index 3f28c8d6..20c081aa 100644 --- a/setup.py +++ b/setup.py @@ -39,6 +39,9 @@ dependencies = [ "google-cloud-logging>=1.14.0, <4.0.0dev", + # Exclude incompatible versions of `google-auth` + # See https://github.com/googleapis/google-cloud-python/issues/12364 + "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 1b92bfe8..95877c43 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -5,6 +5,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.0 protobuf==3.19.5 google-cloud-logging==1.14.0 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index ed7f9aed..3f307898 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
-google-api-core +google-api-core==2.14.0 proto-plus protobuf diff --git a/tests/__init__.py b/tests/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/__init__.py b/tests/unit/gapic/errorreporting_v1beta1/__init__.py index 89a37dc9..8f6cf068 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/__init__.py +++ b/tests/unit/gapic/errorreporting_v1beta1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index ea30d474..83f83a3c 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -72,6 +73,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -102,6 +114,282 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert ErrorGroupServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ErrorGroupServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ErrorGroupServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ErrorGroupServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ErrorGroupServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ErrorGroupServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ErrorGroupServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ErrorGroupServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ErrorGroupServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ErrorGroupServiceClient._get_client_cert_source(None, False) is None + assert ( + ErrorGroupServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ErrorGroupServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ErrorGroupServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ErrorGroupServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ErrorGroupServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceClient), +) +@mock.patch.object( + ErrorGroupServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = 
"foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ErrorGroupServiceClient._DEFAULT_UNIVERSE + default_endpoint = ErrorGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ErrorGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ErrorGroupServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ErrorGroupServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ErrorGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ErrorGroupServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ErrorGroupServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ErrorGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ErrorGroupServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ErrorGroupServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ErrorGroupServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ErrorGroupServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ErrorGroupServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ErrorGroupServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ErrorGroupServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ErrorGroupServiceClient._get_universe_domain(None, None) + == ErrorGroupServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ErrorGroupServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ErrorGroupServiceClient, transports.ErrorGroupServiceGrpcTransport, "grpc"), + (ErrorGroupServiceClient, transports.ErrorGroupServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. 
+ channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -217,13 +505,13 @@ def test_error_group_service_client_get_transport_class(): ) @mock.patch.object( ErrorGroupServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorGroupServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceClient), ) @mock.patch.object( ErrorGroupServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorGroupServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceAsyncClient), ) def test_error_group_service_client_client_options( client_class, transport_class, transport_name @@ -265,7 +553,9 @@ def test_error_group_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -295,15 +585,23 @@ def test_error_group_service_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -313,7 +611,9 @@ def test_error_group_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -331,7 +631,9 @@ def test_error_group_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -384,13 +686,13 @@ def test_error_group_service_client_client_options( ) @mock.patch.object( ErrorGroupServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorGroupServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceClient), ) @mock.patch.object( ErrorGroupServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorGroupServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_error_group_service_client_mtls_env_auto( @@ -413,7 +715,9 @@ def test_error_group_service_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -445,7 +749,9 @@ def test_error_group_service_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -479,7 +785,9 @@ def test_error_group_service_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -569,6 +877,115 @@ def test_error_group_service_client_get_mtls_endpoint_and_cert_source(client_cla assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == 
mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ErrorGroupServiceClient, ErrorGroupServiceAsyncClient] +) +@mock.patch.object( + ErrorGroupServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceClient), +) +@mock.patch.object( + ErrorGroupServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorGroupServiceAsyncClient), +) +def test_error_group_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ErrorGroupServiceClient._DEFAULT_UNIVERSE + default_endpoint = ErrorGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ErrorGroupServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -595,7 +1012,9 @@ def test_error_group_service_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -640,7 +1059,9 @@ def test_error_group_service_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -700,7 +1121,9 @@ def test_error_group_service_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -1264,11 +1687,7 @@ def test_get_group_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -1608,11 +2027,7 @@ def test_update_group_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -1857,7 +2272,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = ErrorGroupServiceClient( @@ -2580,7 +2995,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 29e94f34..0a0a4651 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -75,6 +76,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -105,6 +117,282 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert ErrorStatsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ErrorStatsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ErrorStatsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ErrorStatsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ErrorStatsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ErrorStatsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ErrorStatsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + 
ErrorStatsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ErrorStatsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ErrorStatsServiceClient._get_client_cert_source(None, False) is None + assert ( + ErrorStatsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ErrorStatsServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ErrorStatsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ErrorStatsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ErrorStatsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceClient), +) +@mock.patch.object( + ErrorStatsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ErrorStatsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ErrorStatsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ErrorStatsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ErrorStatsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ErrorStatsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ErrorStatsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ErrorStatsServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + ErrorStatsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ErrorStatsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ErrorStatsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ErrorStatsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ErrorStatsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ErrorStatsServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ErrorStatsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ErrorStatsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ErrorStatsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ErrorStatsServiceClient._get_universe_domain(None, None) + == ErrorStatsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ErrorStatsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (ErrorStatsServiceClient, transports.ErrorStatsServiceGrpcTransport, "grpc"), + (ErrorStatsServiceClient, transports.ErrorStatsServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -220,13 +508,13 @@ def test_error_stats_service_client_get_transport_class(): ) @mock.patch.object( ErrorStatsServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorStatsServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceClient), ) @mock.patch.object( ErrorStatsServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorStatsServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceAsyncClient), ) def test_error_stats_service_client_client_options( client_class, transport_class, transport_name @@ -268,7 +556,9 @@ def test_error_stats_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -298,15 +588,23 @@ def test_error_stats_service_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -316,7 +614,9 @@ def test_error_stats_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -334,7 +634,9 @@ def test_error_stats_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -387,13 +689,13 @@ def test_error_stats_service_client_client_options( ) @mock.patch.object( ErrorStatsServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorStatsServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceClient), ) @mock.patch.object( ErrorStatsServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ErrorStatsServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_error_stats_service_client_mtls_env_auto( @@ -416,7 +718,9 @@ def test_error_stats_service_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -448,7 +752,9 @@ def test_error_stats_service_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -482,7 +788,9 @@ def test_error_stats_service_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -572,6 +880,115 @@ def test_error_stats_service_client_get_mtls_endpoint_and_cert_source(client_cla assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ErrorStatsServiceClient, ErrorStatsServiceAsyncClient] +) +@mock.patch.object( + ErrorStatsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceClient), +) +@mock.patch.object( + ErrorStatsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ErrorStatsServiceAsyncClient), +) +def test_error_stats_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ErrorStatsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ErrorStatsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ErrorStatsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -598,7 +1015,9 @@ def test_error_stats_service_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -643,7 +1062,9 @@ def test_error_stats_service_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -703,7 +1124,9 @@ def test_error_stats_service_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -998,7 +1421,7 @@ async def test_list_group_stats_flattened_error_async(): def test_list_group_stats_pager(transport_name: str = "grpc"): client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1048,7 +1471,7 @@ def test_list_group_stats_pager(transport_name: str = "grpc"): def test_list_group_stats_pages(transport_name: str = "grpc"): client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1090,7 +1513,7 @@ def test_list_group_stats_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_group_stats_async_pager(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
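
The hunks above repeatedly replace `client.DEFAULT_ENDPOINT` with `client._DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE)` and add `test__get_api_endpoint` / universe-domain coverage. As orientation only, here is a minimal standalone Python sketch of the resolution order those tests assert; the helper name `resolve_api_endpoint`, the host strings, and the exception type are illustrative assumptions, not the generated client code.

# Standalone sketch of the endpoint-resolution order asserted by
# test__get_api_endpoint above. Names and host values are assumptions for
# illustration; the real logic lives in the generated *ServiceClient classes.
DEFAULT_UNIVERSE = "googleapis.com"
DEFAULT_ENDPOINT_TEMPLATE = "clouderrorreporting.{UNIVERSE_DOMAIN}"   # assumed service host
DEFAULT_MTLS_ENDPOINT = "clouderrorreporting.mtls.googleapis.com"     # assumed mTLS host

def resolve_api_endpoint(api_override, client_cert_source, universe_domain, use_mtls_endpoint):
    # 1. An explicit api_endpoint override always wins.
    if api_override is not None:
        return api_override
    # 2. "always", or "auto" with a client certificate, selects the mTLS endpoint,
    #    which is only defined for the default universe (googleapis.com).
    if use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source):
        if universe_domain != DEFAULT_UNIVERSE:
            # The generated clients raise MutualTLSChannelError with this message.
            raise RuntimeError(
                "mTLS is not supported in any universe other than googleapis.com."
            )
        return DEFAULT_MTLS_ENDPOINT
    # 3. Otherwise the default endpoint template is filled with the universe domain.
    return DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=universe_domain)

assert resolve_api_endpoint("foo.com", object(), "googleapis.com", "always") == "foo.com"
assert resolve_api_endpoint(None, None, "bar.com", "never") == "clouderrorreporting.bar.com"
assert resolve_api_endpoint(None, object(), "googleapis.com", "auto") == DEFAULT_MTLS_ENDPOINT
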
@@ -1142,7 +1565,7 @@ async def test_list_group_stats_async_pager(): @pytest.mark.asyncio async def test_list_group_stats_async_pages(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1432,7 +1855,7 @@ async def test_list_events_flattened_error_async(): def test_list_events_pager(transport_name: str = "grpc"): client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1482,7 +1905,7 @@ def test_list_events_pager(transport_name: str = "grpc"): def test_list_events_pages(transport_name: str = "grpc"): client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1524,7 +1947,7 @@ def test_list_events_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_events_async_pager(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1574,7 +1997,7 @@ async def test_list_events_async_pager(): @pytest.mark.asyncio async def test_list_events_async_pages(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1897,11 +2320,7 @@ def test_list_group_stats_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2260,11 +2679,7 @@ def test_list_events_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2624,11 +3039,7 @@ def test_delete_events_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -2878,7 +3289,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = ErrorStatsServiceClient( @@ -3605,7 +4016,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 1f84c2c1..7a1e1f16 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -75,6 +76,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -105,6 +117,296 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert ReportErrorsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ReportErrorsServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ReportErrorsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + ReportErrorsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ReportErrorsServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ReportErrorsServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ReportErrorsServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + 
ReportErrorsServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ReportErrorsServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ReportErrorsServiceClient._get_client_cert_source(None, False) is None + assert ( + ReportErrorsServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ReportErrorsServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ReportErrorsServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ReportErrorsServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ReportErrorsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceClient), +) +@mock.patch.object( + ReportErrorsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ReportErrorsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ReportErrorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ReportErrorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ReportErrorsServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + ReportErrorsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ReportErrorsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ReportErrorsServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + ReportErrorsServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ReportErrorsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ReportErrorsServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ReportErrorsServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ReportErrorsServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + ReportErrorsServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ReportErrorsServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ReportErrorsServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ReportErrorsServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ReportErrorsServiceClient._get_universe_domain(None, None) + == ReportErrorsServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ReportErrorsServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ReportErrorsServiceClient, + transports.ReportErrorsServiceGrpcTransport, + "grpc", + ), + ( + ReportErrorsServiceClient, + transports.ReportErrorsServiceRestTransport, + "rest", + ), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. + client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. 
+ api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -228,13 +530,13 @@ def test_report_errors_service_client_get_transport_class(): ) @mock.patch.object( ReportErrorsServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ReportErrorsServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceClient), ) @mock.patch.object( ReportErrorsServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ReportErrorsServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceAsyncClient), ) def test_report_errors_service_client_client_options( client_class, transport_class, transport_name @@ -276,7 +578,9 @@ def test_report_errors_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -306,15 +610,23 @@ def test_report_errors_service_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -324,7 +636,9 @@ def test_report_errors_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -342,7 +656,9 @@ def test_report_errors_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -395,13 +711,13 @@ def test_report_errors_service_client_client_options( ) @mock.patch.object( ReportErrorsServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ReportErrorsServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceClient), ) @mock.patch.object( ReportErrorsServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(ReportErrorsServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_report_errors_service_client_mtls_env_auto( @@ -424,7 +740,9 @@ def test_report_errors_service_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -456,7 +774,9 @@ def test_report_errors_service_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -490,7 +810,9 @@ def test_report_errors_service_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -580,6 +902,115 @@ def test_report_errors_service_client_get_mtls_endpoint_and_cert_source(client_c assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [ReportErrorsServiceClient, ReportErrorsServiceAsyncClient] +) +@mock.patch.object( + ReportErrorsServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceClient), +) +@mock.patch.object( + ReportErrorsServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ReportErrorsServiceAsyncClient), +) +def test_report_errors_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ReportErrorsServiceClient._DEFAULT_UNIVERSE + default_endpoint = ReportErrorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ReportErrorsServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -614,7 +1045,9 @@ def test_report_errors_service_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -659,7 +1092,9 @@ def test_report_errors_service_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -719,7 +1154,9 @@ def test_report_errors_service_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -1159,11 +1596,7 @@ def test_report_error_event_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -1433,7 +1866,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = ReportErrorsServiceClient( @@ -2127,7 +2560,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, From ffa5d8b974847263ba0f097ad9c5d18f7d6a6fe2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 11:21:36 -0400 Subject: [PATCH 084/111] chore: remove nox uninstall/reinstall from python build.sh template (#490) Source-Link: https://github.com/googleapis/synthtool/commit/26358881238150aa51939ccc82b78c0e33d3bc9c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/build.sh | 7 ------- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index e4e943e0..af879fde 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 +# created: 2024-03-15T14:27:15.879623611Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 77234eac..8ede5333 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then From e359d8885ebc86691497042f87d1ca9d76afb37e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 14:10:18 -0400 Subject: [PATCH 085/111] chore(python): add requirements for docs build (#491) Source-Link: https://github.com/googleapis/synthtool/commit/85c23b6bc4352c1b0674848eaeb4e48645aeda6b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/docker/docs/Dockerfile | 4 +++ .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 38 ++++++++++++++++++++++++++++ 4 files changed, 45 insertions(+), 2 deletions(-) create mode 100644 .kokoro/docker/docs/requirements.in create mode 100644 .kokoro/docker/docs/requirements.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index af879fde..5d9542b1 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:007e7e46ef05e5a32e652bd0062be02f6ff050347d91e0f357b28caab0a042c4 -# created: 2024-03-15T14:27:15.879623611Z + digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f +# created: 2024-03-15T16:26:15.743347415Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 8e39a2cc..bdaf39fe 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in new file mode 100644 index 00000000..816817c6 --- /dev/null +++ b/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt new file mode 100644 index 00000000..0e5d70f2 --- /dev/null +++ b/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox From d5f7e80791d4ceb88821ce2fd4599fd4835458c6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 26 Mar 2024 12:16:24 -0400 Subject: [PATCH 086/111] chore: Update gapic-generator-python to v1.16.1 (#493) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.16.1 PiperOrigin-RevId: 618243632 Source-Link: https://github.com/googleapis/googleapis/commit/078a38bd240827be8e69a5b62993380d1b047994 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/7af768c3f8ce58994482350f7401173329950a31 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2FmNzY4YzNmOGNlNTg5OTQ0ODIzNTBmNzQwMTE3MzMyOTk1MGEzMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert changes to testing/constraints-3.8.txt --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- owlbot.py | 1 + .../test_error_group_service.py | 108 +++++++++++- .../test_error_stats_service.py | 166 +++++++++++++++++- .../test_report_errors_service.py | 56 +++++- 4 files changed, 319 insertions(+), 12 deletions(-) diff --git a/owlbot.py b/owlbot.py index 0a02758f..ded8fbe7 100644 --- a/owlbot.py +++ b/owlbot.py @@ -43,6 +43,7 @@ "google/cloud/errorreporting/", "setup.py", "testing/constraints-3.7.txt", + "testing/constraints-3.8.txt", ], ) s.remove_staging_dirs() diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 83f83a3c..647c829d 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -1191,7 +1191,8 @@ def test_get_group(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.GetGroupRequest() + request = error_group_service.GetGroupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.ErrorGroup) @@ -1216,6 +1217,56 @@ def test_get_group_empty_call(): assert args[0] == error_group_service.GetGroupRequest() +def test_get_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = error_group_service.GetGroupRequest( + group_name="group_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_group), "__call__") as call: + client.get_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_group_service.GetGroupRequest( + group_name="group_name_value", + ) + + +@pytest.mark.asyncio +async def test_get_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ErrorGroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_group), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + ) + response = await client.get_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_group_service.GetGroupRequest() + + @pytest.mark.asyncio async def test_get_group_async( transport: str = "grpc_asyncio", request_type=error_group_service.GetGroupRequest @@ -1244,7 +1295,8 @@ async def test_get_group_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.GetGroupRequest() + request = error_group_service.GetGroupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.ErrorGroup) @@ -1427,7 +1479,8 @@ def test_update_group(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.UpdateGroupRequest() + request = error_group_service.UpdateGroupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.ErrorGroup) @@ -1452,6 +1505,52 @@ def test_update_group_empty_call(): assert args[0] == error_group_service.UpdateGroupRequest() +def test_update_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = error_group_service.UpdateGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_group), "__call__") as call: + client.update_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_group_service.UpdateGroupRequest() + + +@pytest.mark.asyncio +async def test_update_group_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ErrorGroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + ) + response = await client.update_group() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_group_service.UpdateGroupRequest() + + @pytest.mark.asyncio async def test_update_group_async( transport: str = "grpc_asyncio", request_type=error_group_service.UpdateGroupRequest @@ -1480,7 +1579,8 @@ async def test_update_group_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.UpdateGroupRequest() + request = error_group_service.UpdateGroupRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, common.ErrorGroup) diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 0a0a4651..512b0bb6 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1192,7 +1192,8 @@ def test_list_group_stats(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListGroupStatsRequest() + request = error_stats_service.ListGroupStatsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListGroupStatsPager) @@ -1215,6 +1216,56 @@ def test_list_group_stats_empty_call(): assert args[0] == error_stats_service.ListGroupStatsRequest() +def test_list_group_stats_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = error_stats_service.ListGroupStatsRequest( + project_name="project_name_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: + client.list_group_stats(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_stats_service.ListGroupStatsRequest( + project_name="project_name_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_group_stats_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ErrorStatsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + error_stats_service.ListGroupStatsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_group_stats() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_stats_service.ListGroupStatsRequest() + + @pytest.mark.asyncio async def test_list_group_stats_async( transport: str = "grpc_asyncio", @@ -1242,7 +1293,8 @@ async def test_list_group_stats_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListGroupStatsRequest() + request = error_stats_service.ListGroupStatsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListGroupStatsAsyncPager) @@ -1639,7 +1691,8 @@ def test_list_events(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListEventsRequest() + request = error_stats_service.ListEventsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListEventsPager) @@ -1662,6 +1715,58 @@ def test_list_events_empty_call(): assert args[0] == error_stats_service.ListEventsRequest() +def test_list_events_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = error_stats_service.ListEventsRequest( + project_name="project_name_value", + group_id="group_id_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + client.list_events(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_stats_service.ListEventsRequest( + project_name="project_name_value", + group_id="group_id_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_events_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ErrorStatsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + error_stats_service.ListEventsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_stats_service.ListEventsRequest() + + @pytest.mark.asyncio async def test_list_events_async( transport: str = "grpc_asyncio", request_type=error_stats_service.ListEventsRequest @@ -1688,7 +1793,8 @@ async def test_list_events_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListEventsRequest() + request = error_stats_service.ListEventsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListEventsAsyncPager) @@ -2069,7 +2175,8 @@ def test_delete_events(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.DeleteEventsRequest() + request = error_stats_service.DeleteEventsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, error_stats_service.DeleteEventsResponse) @@ -2091,6 +2198,52 @@ def test_delete_events_empty_call(): assert args[0] == error_stats_service.DeleteEventsRequest() +def test_delete_events_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = error_stats_service.DeleteEventsRequest( + project_name="project_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_events), "__call__") as call: + client.delete_events(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_stats_service.DeleteEventsRequest( + project_name="project_name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_events_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ErrorStatsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_events), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + error_stats_service.DeleteEventsResponse() + ) + response = await client.delete_events() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == error_stats_service.DeleteEventsRequest() + + @pytest.mark.asyncio async def test_delete_events_async( transport: str = "grpc_asyncio", @@ -2116,7 +2269,8 @@ async def test_delete_events_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.DeleteEventsRequest() + request = error_stats_service.DeleteEventsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, error_stats_service.DeleteEventsResponse) diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 7a1e1f16..7904cc87 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -1222,7 +1222,8 @@ def test_report_error_event(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == report_errors_service.ReportErrorEventRequest() + request = report_errors_service.ReportErrorEventRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, report_errors_service.ReportErrorEventResponse) @@ -1246,6 +1247,56 @@ def test_report_error_event_empty_call(): assert args[0] == report_errors_service.ReportErrorEventRequest() +def test_report_error_event_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = report_errors_service.ReportErrorEventRequest( + project_name="project_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_error_event), "__call__" + ) as call: + client.report_error_event(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == report_errors_service.ReportErrorEventRequest( + project_name="project_name_value", + ) + + +@pytest.mark.asyncio +async def test_report_error_event_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = ReportErrorsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.report_error_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + report_errors_service.ReportErrorEventResponse() + ) + response = await client.report_error_event() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == report_errors_service.ReportErrorEventRequest() + + @pytest.mark.asyncio async def test_report_error_event_async( transport: str = "grpc_asyncio", @@ -1273,7 +1324,8 @@ async def test_report_error_event_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == report_errors_service.ReportErrorEventRequest() + request = report_errors_service.ReportErrorEventRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, report_errors_service.ReportErrorEventResponse) From 08b9d9f641dc7e02d801066624796e89895836a1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 16:08:50 -0400 Subject: [PATCH 087/111] chore(main): release 1.11.0 (#489) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/error_reporting/gapic_version.py | 2 +- google/cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ...tadata_google.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index ce4f35c2..15df9e06 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.10.0" + ".": "1.11.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index e68ae0e2..e4b2e395 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.11.0](https://github.com/googleapis/python-error-reporting/compare/v1.10.0...v1.11.0) (2024-03-26) + + +### Features + +* Allow users to explicitly configure universe domain ([#475](https://github.com/googleapis/python-error-reporting/issues/475)) ([e4c3454](https://github.com/googleapis/python-error-reporting/commit/e4c3454bd5ba9f452479b0bc956c6ef011766d14)) + ## [1.10.0](https://github.com/googleapis/python-error-reporting/compare/v1.9.2...v1.10.0) (2023-12-10) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index 7ce19170..98fb3f58 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.11.0" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index 7ce19170..98fb3f58 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.10.0" # {x-release-please-version} +__version__ = "1.11.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..717892a1 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.11.0" }, "snippets": [ { From 988def551df3f95577519450470a7cbf5f3b6b7d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 15 Apr 2024 13:28:59 -0400 Subject: [PATCH 088/111] docs: add summary_overview template (#496) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: add summary_overview template Source-Link: https://github.com/googleapis/synthtool/commit/d7c2271d319aeb7e3043ec3f1ecec6f3604f1f1e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 * Added summary_overview.md to hidden toctree in index.rst * Updated .repo-metadata.json * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng --- .github/.OwlBot.lock.yaml | 4 +- .github/CODEOWNERS | 8 +-- .github/blunderbuss.yml | 17 +++++- .kokoro/requirements.in | 3 +- .kokoro/requirements.txt | 114 +++++++++++++++++--------------------- .repo-metadata.json | 2 +- README.rst | 2 +- docs/index.rst | 5 ++ docs/summary_overview.md | 22 ++++++++ 9 files changed, 104 insertions(+), 73 deletions(-) create mode 100644 docs/summary_overview.md diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 5d9542b1..31897191 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3741fd1f5f5150378563c76afa06bcc12777b5fe54c5ee01115218f83872134f -# created: 2024-03-15T16:26:15.743347415Z + digest: sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 +# created: 2024-04-05T19:51:26.466869535Z diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2a3b4205..0738e11e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/api-logging +# @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-logging @googleapis/api-logging-partners -# @googleapis/python-samples-reviewers @googleapis/api-logging are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging +# @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging @googleapis/api-logging-partners diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml index a9d3f44e..8574279a 100644 --- a/.github/blunderbuss.yml +++ b/.github/blunderbuss.yml @@ -1,4 +1,17 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. assign_issues: - - googleapis/api-logging-reviewers + - googleapis/api-logging googleapis/api-logging-partners + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/api-logging googleapis/api-logging-partners + assign_prs: - - googleapis/api-logging-reviewers + - googleapis/api-logging googleapis/api-logging-partners diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index ec867d9f..fff4d9ce 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index bda8e38c..dd61f5f3 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - 
--hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + 
--hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 +gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee 
+protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in diff --git a/.repo-metadata.json b/.repo-metadata.json index ea61a2bb..e032093f 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -11,7 +11,7 @@ "distribution_name": "google-cloud-error-reporting", "api_id": "clouderrorreporting.googleapis.com", "requires_billing": false, - "codeowner_team": "@googleapis/api-logging", + "codeowner_team": "@googleapis/api-logging @googleapis/api-logging-partners", "default_version": "v1beta1", "api_shortname": "clouderrorreporting", "api_description": "counts, analyzes and aggregates the crashes in your running cloud services. A centralized error management interface displays the results with sorting and filtering capabilities. A dedicated view shows the error details: time chart, occurrences, affected user count, first and last seen dates and a cleaned exception stack trace. Opt-in to receive email and mobile alerts on new errors." diff --git a/README.rst b/README.rst index 83ec1724..445767c5 100644 --- a/README.rst +++ b/README.rst @@ -15,7 +15,7 @@ Python Client for Error Reporting API .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-error-reporting.svg :target: https://pypi.org/project/google-cloud-error-reporting/ .. _Error Reporting API: https://cloud.google.com/error-reporting -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/clouderrorreporting/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/clouderrorreporting/latest/summary_overview .. _Product Documentation: https://cloud.google.com/error-reporting Quick Start diff --git a/docs/index.rst b/docs/index.rst index cc149749..a33c831a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -32,3 +32,8 @@ For a list of all ``google-cloud-error-reporting`` releases: :maxdepth: 2 changelog + +.. 
toctree:: + :hidden: + + summary_overview.md diff --git a/docs/summary_overview.md b/docs/summary_overview.md new file mode 100644 index 00000000..dd16632d --- /dev/null +++ b/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Error Reporting API + +Overview of the APIs available for Error Reporting API. + +## All entries + +Classes, methods and properties & attributes for +Error Reporting API. + +[classes](https://cloud.google.com/python/docs/reference/clouderrorreporting/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/clouderrorreporting/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/clouderrorreporting/latest/summary_property.html) From 52c5836a76eff6556a4063b7226c21037e521c64 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 17 Apr 2024 10:30:02 -0400 Subject: [PATCH 089/111] chore(python): bump idna from 3.4 to 3.7 in .kokoro (#500) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): bump idna from 3.4 to 3.7 in .kokoro Source-Link: https://github.com/googleapis/synthtool/commit/d50980e704793a2d3310bfb3664f3a82f24b5796 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com> --- .github/.OwlBot.lock.yaml | 4 ++-- .github/blunderbuss.yml | 9 ++++++--- .kokoro/requirements.txt | 6 +++--- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 31897191..81f87c56 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:db05f70829de86fe8e34ba972b7fe56da57eaccf1691f875ed4867db80d5cec9 -# created: 2024-04-05T19:51:26.466869535Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml index 8574279a..d5f69b10 100644 --- a/.github/blunderbuss.yml +++ b/.github/blunderbuss.yml @@ -4,14 +4,17 @@ # Note: This file is autogenerated. To make changes to the assignee # team, please update `codeowner_team` in `.repo-metadata.json`. 
assign_issues: - - googleapis/api-logging googleapis/api-logging-partners + - googleapis/api-logging + - googleapis/api-logging-partners assign_issues_by: - labels: - "samples" to: - googleapis/python-samples-reviewers - - googleapis/api-logging googleapis/api-logging-partners + - googleapis/api-logging + - googleapis/api-logging-partners assign_prs: - - googleapis/api-logging googleapis/api-logging-partners + - googleapis/api-logging + - googleapis/api-logging-partners diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index dd61f5f3..51f92b8e 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -252,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ From bf7736a00716dae58a9b0f63047389750ef51c0d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 5 Jul 2024 10:19:50 -0400 Subject: [PATCH 090/111] chore(python): Use latest python runtime in prerelease_deps session (#512) * chore(python): Use latest python runtime in prerelease_deps session Source-Link: https://github.com/googleapis/synthtool/commit/14d8b284c826cd8501142aeb9ab7e721b630417e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e * filter warnings from google-cloud-logging 1.x --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .flake8 | 2 +- .github/.OwlBot.lock.yaml | 4 +- .kokoro/build.sh | 2 +- .kokoro/docker/docs/Dockerfile | 2 +- .kokoro/populate-secrets.sh | 2 +- .kokoro/publish-docs.sh | 2 +- .kokoro/release.sh | 2 +- .kokoro/requirements.txt | 509 ++++++++++++++------------- .kokoro/test-samples-against-head.sh | 2 +- .kokoro/test-samples-impl.sh | 2 +- .kokoro/test-samples.sh | 2 +- .kokoro/trampoline.sh | 2 +- .kokoro/trampoline_v2.sh | 2 +- .pre-commit-config.yaml | 2 +- .trampolinerc | 2 +- MANIFEST.in | 2 +- docs/conf.py | 2 +- noxfile.py | 56 ++- pytest.ini | 2 + scripts/decrypt-secrets.sh | 2 +- scripts/readme-gen/readme_gen.py | 2 +- 21 files changed, 330 insertions(+), 275 deletions(-) diff --git a/.flake8 b/.flake8 index 87f6e408..32986c79 100644 --- a/.flake8 +++ b/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 81f87c56..76524393 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e +# created: 2024-07-04T19:38:10.086106449Z diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 8ede5333..0c44d59c 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index bdaf39fe..a26ce619 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index 6f397214..c435402f 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 9eafe0be..38f083f0 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 716547f1..036f2480 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 51f92b8e..35ece0e4 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.6.2 \ + --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ + --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - 
--hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + 
--hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # 
gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 
\ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - 
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + 
--hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + 
--hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.17 \ + --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ + --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ + --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ + --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ + --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ + --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ + --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ + --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ + --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ + --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ + --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ + --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ + 
--hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ + --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ + --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ + --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ + --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + 
--hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==43.0 \ + --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - 
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + 
--hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index 63ac41df..e9d8bd79 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 5a0f5fab..55910c8b 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 50b35a48..7933d820 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index d85b1f26..48f79699 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 59a7cf3a..35fa5292 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a8e1695..1d74695f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.trampolinerc b/.trampolinerc index a7dfeb42..00801523 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/MANIFEST.in b/MANIFEST.in index e0a66705..d6814cd6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/docs/conf.py b/docs/conf.py index 99304e78..fd79abb7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/noxfile.py b/noxfile.py index 5617223b..7b984f6d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -159,14 +159,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. session.run( "py.test", @@ -180,15 +194,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -356,10 +367,17 @@ def docfx(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): +@nox.session(python="3.12") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -394,9 +412,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -422,7 +440,13 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -435,6 +459,9 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) if os.path.exists(system_test_folder_path): session.run( @@ -443,4 +470,7 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/pytest.ini b/pytest.ini index 1f8283a2..4166e729 100644 --- a/pytest.ini +++ b/pytest.ini @@ -11,3 +11,5 @@ filterwarnings = ignore:.*pkg_resources is deprecated as an API:DeprecationWarning # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning + # Remove warning once the minium supported version of google-cloud-logging is 2.x.x + ignore:.*Create unlinked descriptors is going to go away. Please use get/find descriptors from generated code or query the descriptor_pool.*:DeprecationWarning diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 0018b421..120b0ddc 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index 1acc1198..8f5e248a 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
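Note on the noxfile change above: the unit and prerelease_deps sessions are now parametrized over the protobuf runtime ("python", "upb", "cpp") and pass PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION through env= so each backend is exercised, with the "cpp" variant skipped on Python 3.11+ and pinned to protobuf<4 where it still runs. A minimal sketch of how that environment variable selects the backend at runtime is shown below; it relies on the protobuf-internal api_implementation module, so treat it as illustrative rather than a supported API:

    import os

    # The variable must be set before google.protobuf is imported for the
    # first time, which is why the noxfile passes it via env= to py.test
    # instead of exporting it after the test session has started.
    os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "upb"

    from google.protobuf.internal import api_implementation

    # Reports which backend is active: "upb", "python", or "cpp".
    print(api_implementation.Type())

A single parametrized session can then be run with something like nox -s "unit-3.8(protobuf_implementation='upb')"; the exact session id depends on the versions listed in UNIT_TEST_PYTHON_VERSIONS.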
From f4b23f581d0e780225a2285c536b420768b63d56 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Jul 2024 13:26:00 -0400 Subject: [PATCH 091/111] chore(python): use python 3.10 for docs build (#516) Source-Link: https://github.com/googleapis/synthtool/commit/9ae07858520bf035a3d5be569b5a65d960ee4392 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 +-- .kokoro/docker/docs/Dockerfile | 21 +++++++------ .kokoro/docker/docs/requirements.txt | 40 +++++++++++++----------- .kokoro/requirements.txt | 46 ++++++++++++++-------------- noxfile.py | 2 +- 5 files changed, 60 insertions(+), 53 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 76524393..f30cb377 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e -# created: 2024-07-04T19:38:10.086106449Z + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e +# created: 2024-07-08T19:25:35.862283192Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index a26ce619..5205308b 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,22 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +###################### Install python 3.10.14 for docs/docfx session + +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +87,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD ["python3.8"] +CMD ["python3.10"] diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt index 0e5d70f2..7129c771 100644 --- a/.kokoro/docker/docs/requirements.txt +++ b/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + 
--hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 35ece0e4..9622baf0 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2024.6.2 \ - --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ - --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -371,23 +371,23 @@ more-itertools==10.3.0 \ # via # jaraco-classes # jaraco-functools -nh3==0.2.17 \ - 
--hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ - --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ - --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ - --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ - --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ - --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ - --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ - --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ - --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ - --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ - --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ - --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ - --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ - --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ - --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ - --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer nox==2024.4.15 \ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ @@ -460,9 +460,9 @@ python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==43.0 \ - --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ - --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 +readme-renderer==44.0 \ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 # via twine requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ diff --git a/noxfile.py b/noxfile.py index 
7b984f6d..7540caad 100644 --- a/noxfile.py +++ b/noxfile.py @@ -286,7 +286,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" From 8d3316866c0825911b26f17fd4f703cabbc3c396 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 15 Jul 2024 11:06:35 -0400 Subject: [PATCH 092/111] fix: Allow Protobuf 5.x (#507) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.17.1 PiperOrigin-RevId: 629071173 Source-Link: https://github.com/googleapis/googleapis/commit/4afa392105cc62e965631d15b772ff68454ecf1c Source-Link: https://github.com/googleapis/googleapis-gen/commit/16dbbb4d0457db5e61ac9f99b0d52a46154455ac Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZkYmJiNGQwNDU3ZGI1ZTYxYWM5Zjk5YjBkNTJhNDYxNTQ0NTVhYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.0 PiperOrigin-RevId: 638650618 Source-Link: https://github.com/googleapis/googleapis/commit/6330f0389afdd04235c59898cc44f715b077aa25 Source-Link: https://github.com/googleapis/googleapis-gen/commit/44fa4f1979dc45c1778fd7caf13f8e61c6d1cae8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDRmYTRmMTk3OWRjNDVjMTc3OGZkN2NhZjEzZjhlNjFjNmQxY2FlOCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat(spanner): Add support for Cloud Spanner Scheduled Backups PiperOrigin-RevId: 649277844 Source-Link: https://github.com/googleapis/googleapis/commit/fd7efa2da3860e813485e63661d3bdd21fc9ba82 Source-Link: https://github.com/googleapis/googleapis-gen/commit/50be251329d8db5b555626ebd4886721f547d3cc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTBiZTI1MTMyOWQ4ZGI1YjU1NTYyNmViZDQ4ODY3MjFmNTQ3ZDNjYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * allow protobuf 5.x * update constraints --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../error_group_service/async_client.py | 52 ++- .../services/error_group_service/client.py | 49 ++- .../error_group_service/transports/base.py | 4 +- .../error_group_service/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 44 ++- .../error_stats_service/async_client.py | 69 ++-- .../services/error_stats_service/client.py | 59 +-- .../error_stats_service/transports/base.py | 4 +- .../error_stats_service/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 49 ++- .../report_errors_service/async_client.py | 35 +- .../services/report_errors_service/client.py | 39 +- .../report_errors_service/transports/base.py | 4 +- .../report_errors_service/transports/grpc.py | 28 +- .../transports/grpc_asyncio.py | 39 +- ....devtools.clouderrorreporting.v1beta1.json | 2 +- setup.py | 2 +- testing/constraints-3.7.txt | 2 +- .../test_error_group_service.py | 234 +++++++++++ .../test_error_stats_service.py | 370 +++++++++++++++++- .../test_report_errors_service.py | 126 ++++++ 21 files changed, 1075 insertions(+), 192 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index c65c9da4..c67a5c32 100644 
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -195,7 +197,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ErrorGroupServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, + ErrorGroupServiceTransport, + Callable[..., ErrorGroupServiceTransport], + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -207,9 +215,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ErrorGroupServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ErrorGroupServiceTransport,Callable[..., ErrorGroupServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ErrorGroupServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -318,8 +328,8 @@ async def sample_get_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([group_name]) if request is not None and has_flattened_params: raise ValueError( @@ -327,7 +337,10 @@ async def sample_get_group(): "the individual field arguments should be set." ) - request = error_group_service.GetGroupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, error_group_service.GetGroupRequest): + request = error_group_service.GetGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -336,11 +349,9 @@ async def sample_get_group(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_group + ] # Certain fields should be provided within the metadata header; # add these here. @@ -425,8 +436,8 @@ async def sample_update_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([group]) if request is not None and has_flattened_params: raise ValueError( @@ -434,7 +445,10 @@ async def sample_update_group(): "the individual field arguments should be set." ) - request = error_group_service.UpdateGroupRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, error_group_service.UpdateGroupRequest): + request = error_group_service.UpdateGroupRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -443,11 +457,9 @@ async def sample_update_group(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_group, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_group + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index 5176c9c9..cba6d6c9 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -523,7 +524,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ErrorGroupServiceTransport]] = None, + transport: Optional[ + Union[ + str, + ErrorGroupServiceTransport, + Callable[..., ErrorGroupServiceTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -535,9 +542,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ErrorGroupServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ErrorGroupServiceTransport,Callable[..., ErrorGroupServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ErrorGroupServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
@@ -646,8 +655,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ErrorGroupServiceTransport], + Callable[..., ErrorGroupServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ErrorGroupServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -724,8 +741,8 @@ def sample_get_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([group_name]) if request is not None and has_flattened_params: raise ValueError( @@ -733,10 +750,8 @@ def sample_get_group(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a error_group_service.GetGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, error_group_service.GetGroupRequest): request = error_group_service.GetGroupRequest(request) # If we have keyword arguments corresponding to fields on the @@ -831,8 +846,8 @@ def sample_update_group(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([group]) if request is not None and has_flattened_params: raise ValueError( @@ -840,10 +855,8 @@ def sample_update_group(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a error_group_service.UpdateGroupRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, error_group_service.UpdateGroupRequest): request = error_group_service.UpdateGroupRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py index 23026c11..52b03cea 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py @@ -83,6 +83,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
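For context on the `_ignore_credentials` flag introduced here, the base transport's selection order is: explicit credentials win, then a credentials file, then Application Default Credentials, and the ADC fallback is skipped when a ready-made channel was supplied. Below is a simplified, standalone mirror of that logic, an illustration only and not the generated code itself (the real transport also threads default scopes through these calls).

import google.auth
from google.api_core import exceptions as core_exceptions

def resolve_credentials(credentials=None, credentials_file=None,
                        ignore_credentials=False, scopes=None,
                        quota_project_id=None):
    # Explicit credentials and a credentials file are mutually exclusive.
    if credentials and credentials_file:
        raise core_exceptions.DuplicateCredentialArgs(
            "'credentials_file' and 'credentials' are mutually exclusive"
        )
    if credentials_file:
        credentials, _ = google.auth.load_credentials_from_file(
            credentials_file, scopes=scopes, quota_project_id=quota_project_id
        )
    elif credentials is None and not ignore_credentials:
        # Fall back to Application Default Credentials, unless a channel
        # instance was passed (ignore_credentials=True), in which case the
        # channel already carries whatever credentials it needs.
        credentials, _ = google.auth.default(
            scopes=scopes, quota_project_id=quota_project_id
        )
    return credentials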
@@ -95,7 +97,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index 3f1e94b7..793b005b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -51,7 +51,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -71,14 +71,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -88,11 +91,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -118,9 +121,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -159,7 +163,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py index 9c4fd649..87365094 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -66,7 +68,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -96,7 +97,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -116,15 +117,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -134,11 +138,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -164,9 +168,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -204,7 +209,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -288,6 +295,21 @@ def update_group( ) return self._stubs["update_group"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.get_group: gapic_v1.method_async.wrap_method( + self.get_group, + default_timeout=None, + client_info=client_info, + ), + self.update_group: gapic_v1.method_async.wrap_method( + self.update_group, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index d4d37ee3..b1f1ac82 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -198,7 +200,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ErrorStatsServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, + 
ErrorStatsServiceTransport, + Callable[..., ErrorStatsServiceTransport], + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -210,9 +218,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ErrorStatsServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ErrorStatsServiceTransport,Callable[..., ErrorStatsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ErrorStatsServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -342,8 +352,8 @@ async def sample_list_group_stats(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_name, time_range]) if request is not None and has_flattened_params: raise ValueError( @@ -351,7 +361,10 @@ async def sample_list_group_stats(): "the individual field arguments should be set." ) - request = error_stats_service.ListGroupStatsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, error_stats_service.ListGroupStatsRequest): + request = error_stats_service.ListGroupStatsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -362,11 +375,9 @@ async def sample_list_group_stats(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_group_stats, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_group_stats + ] # Certain fields should be provided within the metadata header; # add these here. @@ -477,8 +488,8 @@ async def sample_list_events(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_name, group_id]) if request is not None and has_flattened_params: raise ValueError( @@ -486,7 +497,10 @@ async def sample_list_events(): "the individual field arguments should be set." ) - request = error_stats_service.ListEventsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, error_stats_service.ListEventsRequest): + request = error_stats_service.ListEventsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
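The hunks above replace the per-call `gapic_v1.method_async.wrap_method(...)` with a lookup in the transport's `_wrapped_methods` cache, keyed by the bound stub method (bound methods compare and hash by function and instance, so repeated attribute access still hits the same entry). A toy, self-contained sketch of the same pattern using the synchronous wrapper; `FakeTransport` is hypothetical, not library code.

from google.api_core import gapic_v1

class FakeTransport:
    def list_group_stats(self, request, **kwargs):
        return ["group stats for", request]

    def _prep_wrapped_messages(self, client_info):
        # Built once, at transport construction time.
        self._wrapped_methods = {
            self.list_group_stats: gapic_v1.method.wrap_method(
                self.list_group_stats,
                default_timeout=None,
                client_info=client_info,
            ),
        }

transport = FakeTransport()
transport._prep_wrapped_messages(gapic_v1.client_info.ClientInfo())

# Per-call path: a dictionary lookup instead of re-wrapping the RPC.
rpc = transport._wrapped_methods[transport.list_group_stats]
print(rpc({"project_name": "projects/demo"}))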
@@ -497,11 +511,9 @@ async def sample_list_events(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_events, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_events + ] # Certain fields should be provided within the metadata header; # add these here. @@ -598,8 +610,8 @@ async def sample_delete_events(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_name]) if request is not None and has_flattened_params: raise ValueError( @@ -607,7 +619,10 @@ async def sample_delete_events(): "the individual field arguments should be set." ) - request = error_stats_service.DeleteEventsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, error_stats_service.DeleteEventsRequest): + request = error_stats_service.DeleteEventsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -616,11 +631,9 @@ async def sample_delete_events(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_events, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_events + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index f0e61b21..5cce65c5 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -526,7 +527,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ErrorStatsServiceTransport]] = None, + transport: Optional[ + Union[ + str, + ErrorStatsServiceTransport, + Callable[..., ErrorStatsServiceTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -538,9 +545,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ErrorStatsServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ErrorStatsServiceTransport,Callable[..., ErrorStatsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ErrorStatsServiceTransport constructor. 
+ If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -649,8 +658,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ErrorStatsServiceTransport], + Callable[..., ErrorStatsServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ErrorStatsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -748,8 +765,8 @@ def sample_list_group_stats(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_name, time_range]) if request is not None and has_flattened_params: raise ValueError( @@ -757,10 +774,8 @@ def sample_list_group_stats(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a error_stats_service.ListGroupStatsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, error_stats_service.ListGroupStatsRequest): request = error_stats_service.ListGroupStatsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -883,8 +898,8 @@ def sample_list_events(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_name, group_id]) if request is not None and has_flattened_params: raise ValueError( @@ -892,10 +907,8 @@ def sample_list_events(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a error_stats_service.ListEventsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, error_stats_service.ListEventsRequest): request = error_stats_service.ListEventsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1004,8 +1017,8 @@ def sample_delete_events(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_name]) if request is not None and has_flattened_params: raise ValueError( @@ -1013,10 +1026,8 @@ def sample_delete_events(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a error_stats_service.DeleteEventsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, error_stats_service.DeleteEventsRequest): request = error_stats_service.DeleteEventsRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py index 17f38320..2ca8391c 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py @@ -82,6 +82,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -94,7 +96,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index 55e2925c..b1f71dd3 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -51,7 +51,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -71,14 +71,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. 
+ channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -88,11 +91,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -118,9 +121,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -159,7 +163,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py index 6e284bce..895a1295 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -66,7 +68,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
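Besides a ready-made grpc.Channel, the channel argument documented above can now be a factory that the transport calls with the same arguments it would pass to ``create_channel``. A hedged usage sketch: `channel_factory` is hypothetical, the interceptor mention is only a hint at why one might do this, and AnonymousCredentials keeps the snippet self-contained.

from google.auth.credentials import AnonymousCredentials
from google.cloud.errorreporting_v1beta1.services.error_stats_service import transports

def channel_factory(host, **kwargs):
    # Receives host, credentials, scopes, options, etc. exactly as
    # ErrorStatsServiceGrpcTransport.create_channel would; delegate to it and
    # optionally post-process the channel (e.g. grpc.intercept_channel(...)).
    return transports.ErrorStatsServiceGrpcTransport.create_channel(host, **kwargs)

transport = transports.ErrorStatsServiceGrpcTransport(
    credentials=AnonymousCredentials(),
    channel=channel_factory,
)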
@@ -96,7 +97,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -116,15 +117,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -134,11 +138,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -164,9 +168,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -204,7 +209,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -320,6 +327,26 @@ def delete_events( ) return self._stubs["delete_events"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.list_group_stats: gapic_v1.method_async.wrap_method( + self.list_group_stats, + default_timeout=None, + client_info=client_info, + ), + self.list_events: gapic_v1.method_async.wrap_method( + self.list_events, + default_timeout=None, + client_info=client_info, + ), + self.delete_events: gapic_v1.method_async.wrap_method( + self.delete_events, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 03eb9ce0..222a2a65 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -191,7 +193,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, ReportErrorsServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[ + str, + ReportErrorsServiceTransport, + Callable[..., ReportErrorsServiceTransport], + ] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -203,9 +211,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.ReportErrorsServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ReportErrorsServiceTransport,Callable[..., ReportErrorsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ReportErrorsServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -341,8 +351,8 @@ async def sample_report_error_event(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_name, event]) if request is not None and has_flattened_params: raise ValueError( @@ -350,7 +360,10 @@ async def sample_report_error_event(): "the individual field arguments should be set." ) - request = report_errors_service.ReportErrorEventRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, report_errors_service.ReportErrorEventRequest): + request = report_errors_service.ReportErrorEventRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -361,11 +374,9 @@ async def sample_report_error_event(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.report_error_event, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.report_error_event + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 561dbc2a..001d5c5e 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -505,7 +506,13 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, ReportErrorsServiceTransport]] = None, + transport: Optional[ + Union[ + str, + ReportErrorsServiceTransport, + Callable[..., ReportErrorsServiceTransport], + ] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -517,9 +524,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ReportErrorsServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,ReportErrorsServiceTransport,Callable[..., ReportErrorsServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ReportErrorsServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
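The asyncio transports in this patch also gain a `_prep_wrapped_messages` override so the cache is populated with `gapic_v1.method_async.wrap_method` rather than the synchronous wrapper. A self-contained sketch of that override pattern on a hypothetical transport; `FakeAsyncTransport` is not part of the library.

import asyncio
from google.api_core import gapic_v1

class FakeAsyncTransport:
    async def report_error_event(self, request, **kwargs):
        return f"reported: {request}"

    def _prep_wrapped_messages(self, client_info):
        # Same cache as the sync transport, but built with the async wrapper
        # so awaiting the cached callable works.
        self._wrapped_methods = {
            self.report_error_event: gapic_v1.method_async.wrap_method(
                self.report_error_event,
                default_timeout=None,
                client_info=client_info,
            ),
        }

async def main():
    transport = FakeAsyncTransport()
    transport._prep_wrapped_messages(gapic_v1.client_info.ClientInfo())
    rpc = transport._wrapped_methods[transport.report_error_event]
    print(await rpc({"event": "demo"}))

asyncio.run(main())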
@@ -628,8 +637,16 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) - self._transport = Transport( + transport_init: Union[ + Type[ReportErrorsServiceTransport], + Callable[..., ReportErrorsServiceTransport], + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ReportErrorsServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -733,8 +750,8 @@ def sample_report_error_event(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project_name, event]) if request is not None and has_flattened_params: raise ValueError( @@ -742,10 +759,8 @@ def sample_report_error_event(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a report_errors_service.ReportErrorEventRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, report_errors_service.ReportErrorEventRequest): request = report_errors_service.ReportErrorEventRequest(request) # If we have keyword arguments corresponding to fields on the diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py index 052ff4a4..9bb43400 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py @@ -82,6 +82,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
@@ -94,7 +96,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index 5579c5b3..be3ec406 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -50,7 +50,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -70,14 +70,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -87,11 +90,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -117,9 +120,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -158,7 +162,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index a3380e25..84419879 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -65,7 +67,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -95,7 +96,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -115,15 +116,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. 
If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -133,11 +137,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -163,9 +167,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -203,7 +208,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -273,6 +280,16 @@ def report_error_event( ) return self._stubs["report_error_event"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.report_error_event: gapic_v1.method_async.wrap_method( + self.report_error_event, + default_timeout=None, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index 717892a1..fedd655c 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.11.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/setup.py b/setup.py index 20c081aa..88311343 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,7 @@ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url 
= "https://github.com/googleapis/python-error-reporting" diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 95877c43..76b09648 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -7,5 +7,5 @@ google-api-core==1.34.0 google-auth==2.14.1 proto-plus==1.22.0 -protobuf==3.19.5 +protobuf==3.20.2 google-cloud-logging==1.14.0 diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 647c829d..1752363e 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -1211,6 +1211,9 @@ def test_get_group_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_group), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_group() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1234,6 +1237,9 @@ def test_get_group_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_group), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1242,6 +1248,41 @@ def test_get_group_non_empty_request_with_auto_populated_field(): ) +def test_get_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_group] = mock_rpc + request = {} + client.get_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1267,6 +1308,45 @@ async def test_get_group_empty_call_async(): assert args[0] == error_group_service.GetGroupRequest() +@pytest.mark.asyncio +async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ErrorGroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_group + ] = mock_object + + request = {} + await client.get_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_group_async( transport: str = "grpc_asyncio", request_type=error_group_service.GetGroupRequest @@ -1499,6 +1579,9 @@ def test_update_group_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_group), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_group() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1520,12 +1603,50 @@ def test_update_group_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_group), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_group(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == error_group_service.UpdateGroupRequest() +def test_update_group_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_group] = mock_rpc + request = {} + client.update_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_group_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1551,6 +1672,47 @@ async def test_update_group_empty_call_async(): assert args[0] == error_group_service.UpdateGroupRequest() +@pytest.mark.asyncio +async def test_update_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ErrorGroupServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_group + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_group + ] = mock_object + + request = {} + await client.update_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_group_async( transport: str = "grpc_asyncio", request_type=error_group_service.UpdateGroupRequest @@ -1777,6 +1939,42 @@ def test_get_group_rest(request_type): assert response.resolution_status == common.ResolutionStatus.OPEN +def test_get_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_group] = mock_rpc + + request = {} + client.get_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_group_rest_required_fields( request_type=error_group_service.GetGroupRequest, ): @@ -2118,6 +2316,42 @@ def get_message_fields(field): assert response.resolution_status == common.ResolutionStatus.OPEN +def test_update_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_group] = mock_rpc + + request = {} + client.update_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_group_rest_required_fields( request_type=error_group_service.UpdateGroupRequest, ): diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 512b0bb6..0dd71e55 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1210,6 +1210,9 @@ def test_list_group_stats_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_group_stats() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1234,6 +1237,9 @@ def test_list_group_stats_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_group_stats(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1243,6 +1249,43 @@ def test_list_group_stats_non_empty_request_with_auto_populated_field(): ) +def test_list_group_stats_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_group_stats in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_group_stats + ] = mock_rpc + request = {} + client.list_group_stats(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_group_stats(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_group_stats_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1266,6 +1309,47 @@ async def test_list_group_stats_empty_call_async(): assert args[0] == error_stats_service.ListGroupStatsRequest() +@pytest.mark.asyncio +async def test_list_group_stats_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ErrorStatsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_group_stats + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_group_stats + ] = mock_object + + request = {} + await client.list_group_stats(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_group_stats(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_group_stats_async( transport: str = "grpc_asyncio", @@ -1508,13 +1592,13 @@ def test_list_group_stats_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_name", ""),)), ) pager = client.list_group_stats(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -1709,6 +1793,9 @@ def test_list_events_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_events), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_events() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1734,6 +1821,9 @@ def test_list_events_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_events), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_events(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1744,6 +1834,41 @@ def test_list_events_non_empty_request_with_auto_populated_field(): ) +def test_list_events_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_events] = mock_rpc + request = {} + client.list_events(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_events_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1767,6 +1892,47 @@ async def test_list_events_empty_call_async(): assert args[0] == error_stats_service.ListEventsRequest() +@pytest.mark.asyncio +async def test_list_events_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ErrorStatsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_events + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_events + ] = mock_object + + request = {} + await client.list_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.list_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_events_async( transport: str = "grpc_asyncio", request_type=error_stats_service.ListEventsRequest @@ -1996,13 +2162,13 @@ def test_list_events_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_name", ""),)), ) pager = client.list_events(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2192,6 +2358,9 @@ def test_delete_events_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_events), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_events() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2215,6 +2384,9 @@ def test_delete_events_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_events), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_events(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2223,6 +2395,41 @@ def test_delete_events_non_empty_request_with_auto_populated_field(): ) +def test_delete_events_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_events] = mock_rpc + request = {} + client.delete_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_events_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2244,6 +2451,47 @@ async def test_delete_events_empty_call_async(): assert args[0] == error_stats_service.DeleteEventsRequest() +@pytest.mark.asyncio +async def test_delete_events_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ErrorStatsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_events + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_events + ] = mock_object + + request = {} + await client.delete_events(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_events_async( transport: str = "grpc_asyncio", @@ -2464,6 +2712,44 @@ def test_list_group_stats_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_group_stats_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_group_stats in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_group_stats + ] = mock_rpc + + request = {} + client.list_group_stats(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_group_stats(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_group_stats_rest_required_fields( request_type=error_stats_service.ListGroupStatsRequest, ): @@ -2822,6 +3108,42 @@ def test_list_events_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_events_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_events] = mock_rpc + + request = {} + client.list_events(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_events_rest_required_fields( request_type=error_stats_service.ListEventsRequest, ): @@ -3183,6 +3505,42 @@ def test_delete_events_rest(request_type): assert isinstance(response, error_stats_service.DeleteEventsResponse) +def test_delete_events_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_events in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_events] = mock_rpc + + request = {} + client.delete_events(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_events(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_events_rest_required_fields( request_type=error_stats_service.DeleteEventsRequest, ): diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 7904cc87..276ba854 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -1241,6 +1241,9 @@ def test_report_error_event_empty_call(): with mock.patch.object( type(client.transport.report_error_event), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.report_error_event() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1266,6 +1269,9 @@ def test_report_error_event_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.report_error_event), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.report_error_event(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1274,6 +1280,45 @@ def test_report_error_event_non_empty_request_with_auto_populated_field(): ) +def test_report_error_event_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.report_error_event in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.report_error_event + ] = mock_rpc + request = {} + client.report_error_event(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.report_error_event(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_report_error_event_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1297,6 +1342,47 @@ async def test_report_error_event_empty_call_async(): assert args[0] == report_errors_service.ReportErrorEventRequest() +@pytest.mark.asyncio +async def test_report_error_event_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ReportErrorsServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.report_error_event + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.report_error_event + ] = mock_object + + request = {} + await client.report_error_event(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.report_error_event(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_report_error_event_async( transport: str = "grpc_asyncio", @@ -1638,6 +1724,46 @@ def get_message_fields(field): assert isinstance(response, report_errors_service.ReportErrorEventResponse) +def test_report_error_event_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.report_error_event in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.report_error_event + ] = mock_rpc + + request = {} + client.report_error_event(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.report_error_event(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_report_error_event_rest_required_fields( request_type=report_errors_service.ReportErrorEventRequest, ): From 1fef61635e3f9d7297f9fe28d18568cf6c2a0fc1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 11:58:01 -0400 Subject: [PATCH 093/111] chore: Update gapic-generator-python to v1.18.3 (#517) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Updates documentation with regional resource names for multiple requests docs: Removes references as a "global-only" service PiperOrigin-RevId: 652832022 Source-Link: https://github.com/googleapis/googleapis/commit/72b72389895f0c4da476e04625a45b898952a873 Source-Link: https://github.com/googleapis/googleapis-gen/commit/fa6a37c10040be800a9bfe3286f26785535c3737 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZmE2YTM3YzEwMDQwYmU4MDBhOWJmZTMyODZmMjY3ODU1MzVjMzczNyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 655567917 Source-Link: https://github.com/googleapis/googleapis/commit/43aa65e3897557c11d947f3133ddb76e5c4b2a6c Source-Link: https://github.com/googleapis/googleapis-gen/commit/0e38378753074c0f66ff63348d6864929e104d5c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGUzODM3ODc1MzA3NGMwZjY2ZmY2MzM0OGQ2ODY0OTI5ZTEwNGQ1YyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.18.3 PiperOrigin-RevId: 656040068 Source-Link: https://github.com/googleapis/googleapis/commit/3f4e29a88f2e1f412439e61c48c88f81dec0bbbf Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/b8feb2109dde7b0938c22c993d002251ac6714dc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjhmZWIyMTA5ZGRlN2IwOTM4YzIyYzk5M2QwMDIyNTFhYzY3MTRkYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../error_group_service/async_client.py | 22 ++- .../services/error_group_service/client.py | 22 ++- .../error_group_service/transports/rest.py | 9 ++ .../error_stats_service/async_client.py | 69 ++++++-- .../services/error_stats_service/client.py | 69 ++++++-- .../error_stats_service/transports/rest.py | 12 ++ .../report_errors_service/async_client.py | 17 +- .../services/report_errors_service/client.py | 17 +- .../report_errors_service/transports/grpc.py | 17 +- .../transports/grpc_asyncio.py | 17 +- .../errorreporting_v1beta1/types/common.py | 34 +++- .../types/error_group_service.py | 22 ++- .../types/error_stats_service.py | 147 ++++++++++++------ .../types/report_errors_service.py | 17 +- 14 files changed, 373 insertions(+), 118 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index c67a5c32..724b56f0 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -305,12 +305,26 @@ async def sample_get_group(): The request object. A request to return an individual group. group_name (:class:`str`): - Required. The group resource name. Written as - ``projects/{projectID}/groups/{group_name}``. Call - ```groupStats.list`` `__ + Required. The group resource name. Written as either + ``projects/{projectID}/groups/{group_id}`` or + ``projects/{projectID}/locations/{location}/groups/{group_id}``. + Call [groupStats.list] + [google.devtools.clouderrorreporting.v1beta1.ErrorStatsService.ListGroupStats] to return a list of groups belonging to this project. - Example: ``projects/my-project-123/groups/my-group`` + Examples: ``projects/my-project-123/groups/my-group``, + ``projects/my-project-123/locations/global/groups/my-group`` + + In the group resource name, the ``group_id`` is a unique + identifier for a particular error group. The identifier + is derived from key parts of the error-log content and + is treated as Service Data. For information about how + Service Data is handled, see `Google Cloud Privacy + Notice `__. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. This corresponds to the ``group_name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index cba6d6c9..f56f11db 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -718,12 +718,26 @@ def sample_get_group(): The request object. A request to return an individual group. group_name (str): - Required. The group resource name. Written as - ``projects/{projectID}/groups/{group_name}``. Call - ```groupStats.list`` `__ + Required. The group resource name. 
Written as either + ``projects/{projectID}/groups/{group_id}`` or + ``projects/{projectID}/locations/{location}/groups/{group_id}``. + Call [groupStats.list] + [google.devtools.clouderrorreporting.v1beta1.ErrorStatsService.ListGroupStats] to return a list of groups belonging to this project. - Example: ``projects/my-project-123/groups/my-group`` + Examples: ``projects/my-project-123/groups/my-group``, + ``projects/my-project-123/locations/global/groups/my-group`` + + In the group resource name, the ``group_id`` is a unique + identifier for a particular error group. The identifier + is derived from key parts of the error-log content and + is treated as Service Data. For information about how + Service Data is handled, see `Google Cloud Privacy + Notice `__. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. This corresponds to the ``group_name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py index db1beb08..900d875a 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py @@ -277,6 +277,10 @@ def __call__( "method": "get", "uri": "/v1beta1/{group_name=projects/*/groups/*}", }, + { + "method": "get", + "uri": "/v1beta1/{group_name=projects/*/locations/*/groups/*}", + }, ] request, metadata = self._interceptor.pre_get_group(request, metadata) pb_request = error_group_service.GetGroupRequest.pb(request) @@ -366,6 +370,11 @@ def __call__( "uri": "/v1beta1/{group.name=projects/*/groups/*}", "body": "group", }, + { + "method": "put", + "uri": "/v1beta1/{group.name=projects/*/locations/*/groups/*}", + "body": "group", + }, ] request, metadata = self._interceptor.pre_update_group(request, metadata) pb_request = error_group_service.UpdateGroupRequest.pb(request) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index b1f1ac82..702c5bfd 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -315,10 +315,20 @@ async def sample_list_group_stats(): project. Written as ``projects/{projectID}`` or ``projects/{projectNumber}``, where ``{projectID}`` and ``{projectNumber}`` can be found in the `Google Cloud - Console `__. + console `__. + It may also include a location, such as + ``projects/{projectID}/locations/{location}`` where + ``{location}`` is a cloud region. Examples: ``projects/my-project-123``, - ``projects/5551234``. + ``projects/5551234``, + ``projects/my-project-123/locations/us-central1``, + ``projects/5551234/locations/us-central1``. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. Use ``-`` as + a wildcard to request group stats from all regions. This corresponds to the ``project_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -326,12 +336,20 @@ async def sample_list_group_stats(): time_range (:class:`google.cloud.errorreporting_v1beta1.types.QueryTimeRange`): Optional. List data for the given time range. If not set, a default time range is used. 
The field - time_range_begin in the response will specify the - beginning of this time range. Only ErrorGroupStats with - a non-zero count in the given time range are returned, - unless the request contains an explicit group_id list. - If a group_id list is given, also ErrorGroupStats with - zero occurrences are returned. + [time_range_begin] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse.time_range_begin] + in the response will specify the beginning of this time + range. Only [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + with a non-zero count in the given time range are + returned, unless the request contains an explicit + [group_id] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.group_id] + list. If a [group_id] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.group_id] + list is given, also [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + with zero occurrences are returned. This corresponds to the ``time_range`` field on the ``request`` instance; if ``request`` is provided, this @@ -456,18 +474,30 @@ async def sample_list_events(): return. project_name (:class:`str`): Required. The resource name of the Google Cloud Platform - project. Written as ``projects/{projectID}``, where + project. Written as ``projects/{projectID}`` or + ``projects/{projectID}/locations/{location}``, where ``{projectID}`` is the `Google Cloud Platform project - ID `__. + ID `__ + and ``{location}`` is a Cloud region. + + Examples: ``projects/my-project-123``, + ``projects/my-project-123/locations/global``. - Example: ``projects/my-project-123``. + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. This corresponds to the ``project_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. group_id (:class:`str`): - Required. The group for which events - shall be returned. + Required. The group for which events shall be returned. + The ``group_id`` is a unique identifier for a particular + error group. The identifier is derived from key parts of + the error-log content and is treated as Service Data. + For information about how Service Data is handled, see + `Google Cloud Privacy + Notice `__. This corresponds to the ``group_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -588,11 +618,18 @@ async def sample_delete_events(): The request object. Deletes all events in the project. project_name (:class:`str`): Required. The resource name of the Google Cloud Platform - project. Written as ``projects/{projectID}``, where + project. Written as ``projects/{projectID}`` or + ``projects/{projectID}/locations/{location}``, where ``{projectID}`` is the `Google Cloud Platform project - ID `__. + ID `__ + and ``{location}`` is a Cloud region. + + Examples: ``projects/my-project-123``, + ``projects/my-project-123/locations/global``. - Example: ``projects/my-project-123``. + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. 
This corresponds to the ``project_name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 5cce65c5..d6a628ea 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -728,10 +728,20 @@ def sample_list_group_stats(): project. Written as ``projects/{projectID}`` or ``projects/{projectNumber}``, where ``{projectID}`` and ``{projectNumber}`` can be found in the `Google Cloud - Console `__. + console `__. + It may also include a location, such as + ``projects/{projectID}/locations/{location}`` where + ``{location}`` is a cloud region. Examples: ``projects/my-project-123``, - ``projects/5551234``. + ``projects/5551234``, + ``projects/my-project-123/locations/us-central1``, + ``projects/5551234/locations/us-central1``. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. Use ``-`` as + a wildcard to request group stats from all regions. This corresponds to the ``project_name`` field on the ``request`` instance; if ``request`` is provided, this @@ -739,12 +749,20 @@ def sample_list_group_stats(): time_range (google.cloud.errorreporting_v1beta1.types.QueryTimeRange): Optional. List data for the given time range. If not set, a default time range is used. The field - time_range_begin in the response will specify the - beginning of this time range. Only ErrorGroupStats with - a non-zero count in the given time range are returned, - unless the request contains an explicit group_id list. - If a group_id list is given, also ErrorGroupStats with - zero occurrences are returned. + [time_range_begin] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse.time_range_begin] + in the response will specify the beginning of this time + range. Only [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + with a non-zero count in the given time range are + returned, unless the request contains an explicit + [group_id] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.group_id] + list. If a [group_id] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.group_id] + list is given, also [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + with zero occurrences are returned. This corresponds to the ``time_range`` field on the ``request`` instance; if ``request`` is provided, this @@ -866,18 +884,30 @@ def sample_list_events(): return. project_name (str): Required. The resource name of the Google Cloud Platform - project. Written as ``projects/{projectID}``, where + project. Written as ``projects/{projectID}`` or + ``projects/{projectID}/locations/{location}``, where ``{projectID}`` is the `Google Cloud Platform project - ID `__. + ID `__ + and ``{location}`` is a Cloud region. + + Examples: ``projects/my-project-123``, + ``projects/my-project-123/locations/global``. - Example: ``projects/my-project-123``. + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. This corresponds to the ``project_name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. group_id (str): - Required. The group for which events - shall be returned. + Required. 
The group for which events shall be returned. + The ``group_id`` is a unique identifier for a particular + error group. The identifier is derived from key parts of + the error-log content and is treated as Service Data. + For information about how Service Data is handled, see + `Google Cloud Privacy + Notice `__. This corresponds to the ``group_id`` field on the ``request`` instance; if ``request`` is provided, this @@ -995,11 +1025,18 @@ def sample_delete_events(): The request object. Deletes all events in the project. project_name (str): Required. The resource name of the Google Cloud Platform - project. Written as ``projects/{projectID}``, where + project. Written as ``projects/{projectID}`` or + ``projects/{projectID}/locations/{location}``, where ``{projectID}`` is the `Google Cloud Platform project - ID `__. + ID `__ + and ``{location}`` is a Cloud region. + + Examples: ``projects/my-project-123``, + ``projects/my-project-123/locations/global``. - Example: ``projects/my-project-123``. + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. This corresponds to the ``project_name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py index f8a18a30..f7cbbf8f 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py @@ -311,6 +311,10 @@ def __call__( "method": "delete", "uri": "/v1beta1/{project_name=projects/*}/events", }, + { + "method": "delete", + "uri": "/v1beta1/{project_name=projects/*/locations/*}/events", + }, ] request, metadata = self._interceptor.pre_delete_events(request, metadata) pb_request = error_stats_service.DeleteEventsRequest.pb(request) @@ -401,6 +405,10 @@ def __call__( "method": "get", "uri": "/v1beta1/{project_name=projects/*}/events", }, + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*/locations/*}/events", + }, ] request, metadata = self._interceptor.pre_list_events(request, metadata) pb_request = error_stats_service.ListEventsRequest.pb(request) @@ -488,6 +496,10 @@ def __call__( "method": "get", "uri": "/v1beta1/{project_name=projects/*}/groupStats", }, + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*/locations/*}/groupStats", + }, ] request, metadata = self._interceptor.pre_list_group_stats( request, metadata diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 222a2a65..1dea38b7 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -280,10 +280,19 @@ async def report_error_event( ``POST https://clouderrorreporting.googleapis.com/v1beta1/{projectName}/events:report?key=123ABC456`` - **Note:** `Error Reporting `__ is a global - service built on Cloud Logging and doesn't analyze logs stored - in regional log buckets or logs routed to other Google Cloud - projects. 
+ **Note:** [Error Reporting] + (https://cloud.google.com/error-reporting) is a service built on + Cloud Logging and can analyze log entries when all of the + following are true: + + - Customer-managed encryption keys (CMEK) are disabled on the + log bucket. + - The log bucket satisfies one of the following: + + - The log bucket is stored in the same project where the + logs originated. + - The logs were routed to a project, and then that project + stored those logs in a log bucket that it owns. .. code-block:: python diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 001d5c5e..37efdb0f 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -679,10 +679,19 @@ def report_error_event( ``POST https://clouderrorreporting.googleapis.com/v1beta1/{projectName}/events:report?key=123ABC456`` - **Note:** `Error Reporting `__ is a global - service built on Cloud Logging and doesn't analyze logs stored - in regional log buckets or logs routed to other Google Cloud - projects. + **Note:** [Error Reporting] + (https://cloud.google.com/error-reporting) is a service built on + Cloud Logging and can analyze log entries when all of the + following are true: + + - Customer-managed encryption keys (CMEK) are disabled on the + log bucket. + - The log bucket satisfies one of the following: + + - The log bucket is stored in the same project where the + logs originated. + - The logs were routed to a project, and then that project + stored those logs in a log bucket that it owns. .. code-block:: python diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index be3ec406..0cbcc619 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -253,10 +253,19 @@ def report_error_event( ``POST https://clouderrorreporting.googleapis.com/v1beta1/{projectName}/events:report?key=123ABC456`` - **Note:** `Error Reporting `__ is a global - service built on Cloud Logging and doesn't analyze logs stored - in regional log buckets or logs routed to other Google Cloud - projects. + **Note:** [Error Reporting] + (https://cloud.google.com/error-reporting) is a service built on + Cloud Logging and can analyze log entries when all of the + following are true: + + - Customer-managed encryption keys (CMEK) are disabled on the + log bucket. + - The log bucket satisfies one of the following: + + - The log bucket is stored in the same project where the + logs originated. + - The logs were routed to a project, and then that project + stored those logs in a log bucket that it owns. 
Returns: Callable[[~.ReportErrorEventRequest], diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index 84419879..5b3612ba 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -257,10 +257,19 @@ def report_error_event( ``POST https://clouderrorreporting.googleapis.com/v1beta1/{projectName}/events:report?key=123ABC456`` - **Note:** `Error Reporting `__ is a global - service built on Cloud Logging and doesn't analyze logs stored - in regional log buckets or logs routed to other Google Cloud - projects. + **Note:** [Error Reporting] + (https://cloud.google.com/error-reporting) is a service built on + Cloud Logging and can analyze log entries when all of the + following are true: + + - Customer-managed encryption keys (CMEK) are disabled on the + log bucket. + - The log bucket satisfies one of the following: + + - The log bucket is stored in the same project where the + logs originated. + - The logs were routed to a project, and then that project + stored those logs in a log bucket that it owns. Returns: Callable[[~.ReportErrorEventRequest], diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index d7517ecb..1278fa58 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -70,18 +70,38 @@ class ErrorGroup(proto.Message): Attributes: name (str): - The group resource name. - Example: - projects/my-project-123/groups/CNSgkpnppqKCUw + The group resource name. Written as + ``projects/{projectID}/groups/{group_id}`` or + ``projects/{projectID}/locations/{location}/groups/{group_id}`` + + Examples: ``projects/my-project-123/groups/my-group``, + ``projects/my-project-123/locations/us-central1/groups/my-group`` + + In the group resource name, the ``group_id`` is a unique + identifier for a particular error group. The identifier is + derived from key parts of the error-log content and is + treated as Service Data. For information about how Service + Data is handled, see `Google Cloud Privacy + Notice `__. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. group_id (str): - Group IDs are unique for a given project. If - the same kind of error occurs in different - service contexts, it will receive the same group - ID. + An opaque identifier of the group. This field is assigned by + the Error Reporting system and always populated. + + In the group resource name, the ``group_id`` is a unique + identifier for a particular error group. The identifier is + derived from key parts of the error-log content and is + treated as Service Data. For information about how Service + Data is handled, see `Google Cloud Privacy + Notice `__. tracking_issues (MutableSequence[google.cloud.errorreporting_v1beta1.types.TrackingIssue]): Associated tracking issues. resolution_status (google.cloud.errorreporting_v1beta1.types.ResolutionStatus): Error group's resolution status. 
+ An unspecified resolution status will be interpreted as OPEN """ diff --git a/google/cloud/errorreporting_v1beta1/types/error_group_service.py b/google/cloud/errorreporting_v1beta1/types/error_group_service.py index d38bb52d..cfd82fa7 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_group_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_group_service.py @@ -36,12 +36,26 @@ class GetGroupRequest(proto.Message): Attributes: group_name (str): - Required. The group resource name. Written as - ``projects/{projectID}/groups/{group_name}``. Call - ```groupStats.list`` `__ + Required. The group resource name. Written as either + ``projects/{projectID}/groups/{group_id}`` or + ``projects/{projectID}/locations/{location}/groups/{group_id}``. + Call [groupStats.list] + [google.devtools.clouderrorreporting.v1beta1.ErrorStatsService.ListGroupStats] to return a list of groups belonging to this project. - Example: ``projects/my-project-123/groups/my-group`` + Examples: ``projects/my-project-123/groups/my-group``, + ``projects/my-project-123/locations/global/groups/my-group`` + + In the group resource name, the ``group_id`` is a unique + identifier for a particular error group. The identifier is + derived from key parts of the error-log content and is + treated as Service Data. For information about how Service + Data is handled, see `Google Cloud Privacy + Notice `__. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. """ group_name: str = proto.Field( diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index 53e4a8f9..93158ab5 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -53,12 +53,18 @@ class TimedCountAlignment(proto.Enum): ALIGNMENT_EQUAL_ROUNDED (1): The time periods shall be consecutive, have width equal to the requested duration, and be aligned at the - ``alignment_time`` provided in the request. The - ``alignment_time`` does not have to be inside the query - period but even if it is outside, only time periods are - returned which overlap with the query period. A rounded - alignment will typically result in a different size of the - first or the last time period. + [alignment_time] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.alignment_time] + provided in the request. + + The [alignment_time] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.alignment_time] + does not have to be inside the query period but even if it + is outside, only time periods are returned which overlap + with the query period. + + A rounded alignment will typically result in a different + size of the first or the last time period. ALIGNMENT_EQUAL_AT_END (2): The time periods shall be consecutive, have width equal to the requested duration, and be @@ -106,29 +112,54 @@ class ListGroupStatsRequest(proto.Message): project. Written as ``projects/{projectID}`` or ``projects/{projectNumber}``, where ``{projectID}`` and ``{projectNumber}`` can be found in the `Google Cloud - Console `__. - - Examples: ``projects/my-project-123``, ``projects/5551234``. + console `__. + It may also include a location, such as + ``projects/{projectID}/locations/{location}`` where + ``{location}`` is a cloud region. 
+ + Examples: ``projects/my-project-123``, ``projects/5551234``, + ``projects/my-project-123/locations/us-central1``, + ``projects/5551234/locations/us-central1``. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. Use ``-`` as a + wildcard to request group stats from all regions. group_id (MutableSequence[str]): - Optional. List all - ErrorGroupStats with these IDs. + Optional. List all [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + with these IDs. The ``group_id`` is a unique identifier for + a particular error group. The identifier is derived from key + parts of the error-log content and is treated as Service + Data. For information about how Service Data is handled, see + [Google Cloud Privacy Notice] + (https://cloud.google.com/terms/cloud-privacy-notice). service_filter (google.cloud.errorreporting_v1beta1.types.ServiceContextFilter): - Optional. List only - ErrorGroupStats which belong to a - service context that matches the filter. Data - for all service contexts is returned if this - field is not specified. + Optional. List only [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + which belong to a service context that matches the filter. + Data for all service contexts is returned if this field is + not specified. time_range (google.cloud.errorreporting_v1beta1.types.QueryTimeRange): Optional. List data for the given time range. If not set, a - default time range is used. The field time_range_begin in - the response will specify the beginning of this time range. - Only ErrorGroupStats with a non-zero count in the given time - range are returned, unless the request contains an explicit - group_id list. If a group_id list is given, also - ErrorGroupStats with zero occurrences are returned. + default time range is used. The field [time_range_begin] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse.time_range_begin] + in the response will specify the beginning of this time + range. Only [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + with a non-zero count in the given time range are returned, + unless the request contains an explicit [group_id] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.group_id] + list. If a [group_id] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsRequest.group_id] + list is given, also [ErrorGroupStats] + [google.devtools.clouderrorreporting.v1beta1.ErrorGroupStats] + with zero occurrences are returned. timed_count_duration (google.protobuf.duration_pb2.Duration): Optional. The preferred duration for a single returned - ``TimedCount``. If not set, no timed counts are returned. + [TimedCount] + [google.devtools.clouderrorreporting.v1beta1.TimedCount]. If + not set, no timed counts are returned. alignment (google.cloud.errorreporting_v1beta1.types.TimedCountAlignment): Optional. The alignment of the timed counts to be returned. Default is ``ALIGNMENT_EQUAL_AT_END``. @@ -143,9 +174,11 @@ class ListGroupStatsRequest(proto.Message): Optional. The maximum number of results to return per response. Default is 20. page_token (str): - Optional. A ``next_page_token`` provided by a previous - response. To view additional results, pass this token along - with the identical query parameters as the first request. + Optional. 
A [next_page_token] + [google.devtools.clouderrorreporting.v1beta1.ListGroupStatsResponse.next_page_token] + provided by a previous response. To view additional results, + pass this token along with the identical query parameters as + the first request. """ project_name: str = proto.Field( @@ -251,15 +284,16 @@ class ErrorGroupStats(proto.Message): affected_users_count (int): Approximate number of affected users in the given group that match the filter criteria. Users are distinguished by data - in the ``ErrorContext`` of the individual error events, such - as their login name or their remote IP address in case of - HTTP requests. The number of affected users can be zero even - if the number of errors is non-zero if no data was provided - from which the affected user could be deduced. Users are - counted based on data in the request context that was - provided in the error report. If more users are implicitly - affected, such as due to a crash of the whole service, this - is not reflected here. + in the [ErrorContext] + [google.devtools.clouderrorreporting.v1beta1.ErrorContext] + of the individual error events, such as their login name or + their remote IP address in case of HTTP requests. The number + of affected users can be zero even if the number of errors + is non-zero if no data was provided from which the affected + user could be deduced. Users are counted based on data in + the request context that was provided in the error report. + If more users are implicitly affected, such as due to a + crash of the whole service, this is not reflected here. timed_counts (MutableSequence[google.cloud.errorreporting_v1beta1.types.TimedCount]): Approximate number of occurrences over time. Timed counts returned by ListGroups are @@ -377,14 +411,26 @@ class ListEventsRequest(proto.Message): Attributes: project_name (str): Required. The resource name of the Google Cloud Platform - project. Written as ``projects/{projectID}``, where + project. Written as ``projects/{projectID}`` or + ``projects/{projectID}/locations/{location}``, where ``{projectID}`` is the `Google Cloud Platform project - ID `__. + ID `__ and + ``{location}`` is a Cloud region. - Example: ``projects/my-project-123``. + Examples: ``projects/my-project-123``, + ``projects/my-project-123/locations/global``. + + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. group_id (str): - Required. The group for which events shall be - returned. + Required. The group for which events shall be returned. The + ``group_id`` is a unique identifier for a particular error + group. The identifier is derived from key parts of the + error-log content and is treated as Service Data. For + information about how Service Data is handled, see `Google + Cloud Privacy + Notice `__. service_filter (google.cloud.errorreporting_v1beta1.types.ServiceContextFilter): Optional. List only ErrorGroups which belong to a service context that matches the filter. @@ -469,7 +515,13 @@ def raw_page(self): class QueryTimeRange(proto.Message): - r"""Requests might be rejected or the resulting timed count + r"""A time range for which error group data shall be displayed. + Query time ranges end at 'now'. + When longer time ranges are selected, the resolution of the data + decreases. The description of each time range below indicates + the suggested minimum timed count duration for that range. + + Requests might be rejected or the resulting timed count durations might be adjusted for lower durations. 
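# Illustrative sketch, not part of this patch: a ListGroupStatsRequest built from
# the fields documented above. It assumes the v1beta1 QueryTimeRange.Period enum
# and the package-level type exports; the project name and durations are
# placeholders.
from google.cloud import errorreporting_v1beta1
from google.protobuf import duration_pb2

stats_client = errorreporting_v1beta1.ErrorStatsServiceClient()

request = errorreporting_v1beta1.ListGroupStatsRequest(
    project_name="projects/my-project-123",
    time_range=errorreporting_v1beta1.QueryTimeRange(
        period=errorreporting_v1beta1.QueryTimeRange.Period.PERIOD_1_DAY
    ),
    # One TimedCount per hour within the queried day.
    timed_count_duration=duration_pb2.Duration(seconds=3600),
    alignment=errorreporting_v1beta1.TimedCountAlignment.ALIGNMENT_EQUAL_AT_END,
    page_size=20,
)

for group_stats in stats_client.list_group_stats(request=request):
    print(group_stats.group.group_id, group_stats.count)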
Attributes: @@ -555,11 +607,18 @@ class DeleteEventsRequest(proto.Message): Attributes: project_name (str): Required. The resource name of the Google Cloud Platform - project. Written as ``projects/{projectID}``, where + project. Written as ``projects/{projectID}`` or + ``projects/{projectID}/locations/{location}``, where ``{projectID}`` is the `Google Cloud Platform project - ID `__. + ID `__ and + ``{location}`` is a Cloud region. + + Examples: ``projects/my-project-123``, + ``projects/my-project-123/locations/global``. - Example: ``projects/my-project-123``. + For a list of supported locations, see `Supported + Regions `__. + ``global`` is the default when unspecified. """ project_name: str = proto.Field( diff --git a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py index 1e719f92..87c1b14d 100644 --- a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py +++ b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py @@ -72,10 +72,13 @@ class ReportedErrorEvent(proto.Message): Attributes: event_time (google.protobuf.timestamp_pb2.Timestamp): - Optional. Time when the event occurred. - If not provided, the time when the event was - received by the Error Reporting system will be - used. + Optional. Time when the event occurred. If not provided, the + time when the event was received by the Error Reporting + system is used. If provided, the time must not exceed the + `logs retention + period `__ + in the past, or be more than 24 hours in the future. If an + invalid time is provided, then an error is returned. service_context (google.cloud.errorreporting_v1beta1.types.ServiceContext): Required. The service context in which this error has occurred. @@ -99,10 +102,10 @@ class ReportedErrorEvent(proto.Message): ```Exception.backtrace`` `__. - **C#**: Must be the return value of ```Exception.ToString()`` `__. - - **PHP**: Must start with - ``PHP (Notice|Parse error|Fatal error|Warning)`` and + - **PHP**: Must be prefixed with + ``"PHP (Notice|Parse error|Fatal error|Warning): "`` and contain the result of - ```(string)$exception`` `__. + ```(string)$exception`` `__. - **Go**: Must be the return value of ```runtime.Stack()`` `__. 
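# Illustrative sketch, not part of this patch: reporting an event whose message
# carries a formatted stack trace, within the event_time bounds described above.
# The service name, version and project are placeholders.
import traceback

from google.cloud import errorreporting_v1beta1

report_client = errorreporting_v1beta1.ReportErrorsServiceClient()

try:
    raise RuntimeError("example failure")
except RuntimeError:
    event = errorreporting_v1beta1.ReportedErrorEvent(
        service_context=errorreporting_v1beta1.ServiceContext(
            service="my-service", version="1.0.0"
        ),
        # For Python, the message is expected to contain the formatted traceback.
        message=traceback.format_exc(),
    )
    report_client.report_error_event(
        project_name="projects/my-project-123", event=event
    )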
context (google.cloud.errorreporting_v1beta1.types.ErrorContext): From cbe41fd1fa4edbb1c574cd2c07dc99d4415c3078 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 31 Jul 2024 14:15:13 -0400 Subject: [PATCH 094/111] fix: Retry and timeout values do not propagate in requests during pagination (#518) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.4 PiperOrigin-RevId: 657207628 Source-Link: https://github.com/googleapis/googleapis/commit/33fe71e5a2061402283e0455636a98e5b78eaf7f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e02739d122ed15bd5ef5771c57f12a83d47a1dda Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTAyNzM5ZDEyMmVkMTViZDVlZjU3NzFjNTdmMTJhODNkNDdhMWRkYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../error_stats_service/async_client.py | 4 ++ .../services/error_stats_service/client.py | 4 ++ .../services/error_stats_service/pagers.py | 69 +++++++++++++++++-- .../test_error_group_service.py | 1 + .../test_error_stats_service.py | 13 +++- .../test_report_errors_service.py | 1 + 6 files changed, 86 insertions(+), 6 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 702c5bfd..2ba6b9e2 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -422,6 +422,8 @@ async def sample_list_group_stats(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -570,6 +572,8 @@ async def sample_list_events(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index d6a628ea..6356e9d5 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -832,6 +832,8 @@ def sample_list_group_stats(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -977,6 +979,8 @@ def sample_list_events(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py index 4c333f75..274f7ac2 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
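# Illustrative sketch, not part of this patch: with this fix, a retry policy and
# timeout passed to list_group_stats()/list_events() are stored on the returned
# pager and re-sent on every next_page_token fetch, not only on the first request.
# The values below are placeholders and mirror the pattern used in the unit tests.
from google.api_core import retry as retries
from google.cloud import errorreporting_v1beta1

stats_client = errorreporting_v1beta1.ErrorStatsServiceClient()

pager = stats_client.list_events(
    request={"project_name": "projects/my-project-123", "group_id": "my-group-id"},
    retry=retries.Retry(),
    timeout=30.0,
)

# Each page request issued while iterating reuses the same retry and timeout.
for error_event in pager:
    print(error_event.event_time, error_event.message)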
# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_stats_service @@ -52,6 +65,8 @@ def __init__( request: error_stats_service.ListGroupStatsRequest, response: error_stats_service.ListGroupStatsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -63,12 +78,17 @@ def __init__( The initial request object. response (google.cloud.errorreporting_v1beta1.types.ListGroupStatsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = error_stats_service.ListGroupStatsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -79,7 +99,12 @@ def pages(self) -> Iterator[error_stats_service.ListGroupStatsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[error_stats_service.ErrorGroupStats]: @@ -114,6 +139,8 @@ def __init__( request: error_stats_service.ListGroupStatsRequest, response: error_stats_service.ListGroupStatsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -125,12 +152,17 @@ def __init__( The initial request object. response (google.cloud.errorreporting_v1beta1.types.ListGroupStatsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = error_stats_service.ListGroupStatsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -141,7 +173,12 @@ async def pages(self) -> AsyncIterator[error_stats_service.ListGroupStatsRespons yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[error_stats_service.ErrorGroupStats]: @@ -180,6 +217,8 @@ def __init__( request: error_stats_service.ListEventsRequest, response: error_stats_service.ListEventsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -191,12 +230,17 @@ def __init__( The initial request object. response (google.cloud.errorreporting_v1beta1.types.ListEventsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = error_stats_service.ListEventsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -207,7 +251,12 @@ def pages(self) -> Iterator[error_stats_service.ListEventsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[common.ErrorEvent]: @@ -242,6 +291,8 @@ def __init__( request: error_stats_service.ListEventsRequest, response: error_stats_service.ListEventsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -253,12 +304,17 @@ def __init__( The initial request object. response (google.cloud.errorreporting_v1beta1.types.ListEventsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = error_stats_service.ListEventsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -269,7 +325,12 @@ async def pages(self) -> AsyncIterator[error_stats_service.ListEventsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[common.ErrorEvent]: diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index 1752363e..e80b7c10 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.errorreporting_v1beta1.services.error_group_service import ( diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 0dd71e55..3312037b 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.errorreporting_v1beta1.services.error_stats_service import ( @@ -1593,12 +1594,16 @@ def test_list_group_stats_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_name", ""),)), ) - pager = client.list_group_stats(request={}) + pager = client.list_group_stats(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2163,12 +2168,16 @@ def test_list_events_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project_name", ""),)), ) - pager = client.list_events(request={}) + pager = client.list_events(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 276ba854..559a5787 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ 
b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.errorreporting_v1beta1.services.report_errors_service import ( From 8e661d75346da9b2711334ee3bc7f19197e3e7d4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 11:21:01 -0400 Subject: [PATCH 095/111] chore(python): fix docs build (#519) Source-Link: https://github.com/googleapis/synthtool/commit/bef813d194de29ddf3576eda60148b6b3dcc93d9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/docker/docs/Dockerfile | 9 ++++----- .kokoro/publish-docs.sh | 20 ++++++++++---------- 3 files changed, 16 insertions(+), 17 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f30cb377..6d064ddb 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e -# created: 2024-07-08T19:25:35.862283192Z + digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 +# created: 2024-07-31T14:52:44.926548819Z diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 5205308b..e5410e29 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -72,19 +72,18 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip -RUN python3 -m pip +RUN python3.10 -m pip # Install build requirements COPY requirements.txt /requirements.txt -RUN python3 -m pip install --require-hashes -r requirements.txt +RUN python3.10 -m pip install --require-hashes -r requirements.txt CMD ["python3.10"] diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 38f083f0..233205d5 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -21,18 +21,18 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version +python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt +python3.10 -m nox --version # build docs nox -s docs # create metadata -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // 
empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -40,18 +40,18 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" # docfx yaml files nox -s docfx # create metadata. -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -59,4 +59,4 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" From 436a211065545c58f5589d13bfd8c74518b5ccb2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 26 Aug 2024 11:22:08 -0400 Subject: [PATCH 096/111] chore: Update gapic-generator-python to v1.18.5 (#521) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.18.5 PiperOrigin-RevId: 661268868 Source-Link: https://github.com/googleapis/googleapis/commit/f7d214cb08cd7d9b018d44564a8b184263f64177 Source-Link: https://github.com/googleapis/googleapis-gen/commit/79a8411bbdb25a983fa3aae8c0e14327df129f94 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzlhODQxMWJiZGIyNWE5ODNmYTNhYWU4YzBlMTQzMjdkZjEyOWY5NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../error_group_service/async_client.py | 5 +--- .../services/error_group_service/client.py | 2 +- .../error_stats_service/async_client.py | 5 +--- .../services/error_stats_service/client.py | 2 +- .../report_errors_service/async_client.py | 6 +---- .../services/report_errors_service/client.py | 2 +- .../test_error_group_service.py | 18 +++++++------ .../test_error_stats_service.py | 27 ++++++++++--------- .../test_report_errors_service.py | 9 ++++--- 9 files changed, 36 insertions(+), 40 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index 724b56f0..21361c25 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -189,9 +188,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ErrorGroupServiceClient).get_transport_class, type(ErrorGroupServiceClient) - ) + get_transport_class = ErrorGroupServiceClient.get_transport_class def __init__( self, diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index f56f11db..97cb9157 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -659,7 +659,7 @@ def __init__( Type[ErrorGroupServiceTransport], Callable[..., ErrorGroupServiceTransport], ] = ( - type(self).get_transport_class(transport) + ErrorGroupServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ErrorGroupServiceTransport], transport) ) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 2ba6b9e2..232049f7 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ErrorStatsServiceClient).get_transport_class, type(ErrorStatsServiceClient) - ) + get_transport_class = ErrorStatsServiceClient.get_transport_class def __init__( self, diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 6356e9d5..2f128d1a 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -662,7 +662,7 @@ def __init__( Type[ErrorStatsServiceTransport], Callable[..., ErrorStatsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ErrorStatsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ErrorStatsServiceTransport], transport) ) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 1dea38b7..6c0ef660 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
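# Illustrative note, not part of this patch: the generator now references the
# classmethod directly instead of wrapping it in functools.partial. A toy model
# of the two forms (in the real clients the method lives on the client's
# metaclass, as it does here):
import functools


class _ClientMeta(type):
    def get_transport_class(cls, label=None):
        return {"grpc": "GrpcTransport"}.get(label, "GrpcTransport")


class _Client(metaclass=_ClientMeta):
    pass


old_style = functools.partial(type(_Client).get_transport_class, type(_Client))
new_style = _Client.get_transport_class  # bound through the metaclass

assert old_style("grpc") == new_style("grpc")  # both resolve the same transport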
# from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -184,10 +183,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(ReportErrorsServiceClient).get_transport_class, - type(ReportErrorsServiceClient), - ) + get_transport_class = ReportErrorsServiceClient.get_transport_class def __init__( self, diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 37efdb0f..212c5811 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -641,7 +641,7 @@ def __init__( Type[ReportErrorsServiceTransport], Callable[..., ReportErrorsServiceTransport], ] = ( - type(self).get_transport_class(transport) + ReportErrorsServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., ReportErrorsServiceTransport], transport) ) diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index e80b7c10..e6e0089b 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -1330,22 +1330,23 @@ async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_group - ] = mock_object + ] = mock_rpc request = {} await client.get_group(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1696,22 +1697,23 @@ async def test_update_group_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_group - ] = mock_object + ] = mock_rpc request = {} await client.update_group(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 3312037b..9241fa39 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1333,22 +1333,23 @@ async def test_list_group_stats_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_group_stats - ] = mock_object + ] = mock_rpc request = {} await client.list_group_stats(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_group_stats(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1920,22 +1921,23 @@ async def test_list_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_events - ] = mock_object + ] = mock_rpc request = {} await client.list_events(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2483,22 +2485,23 @@ async def test_delete_events_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_events - ] = mock_object + ] = mock_rpc request = {} await client.delete_events(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_events(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index 559a5787..ca1f397f 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -1366,22 +1366,23 @@ async def test_report_error_event_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.report_error_event - ] = mock_object + ] = mock_rpc request = {} await client.report_error_event(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.report_error_event(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From f593c1b0f88b38b94503ecef9dc6e741109c4ada Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 16 Sep 2024 17:28:03 -0400 Subject: [PATCH 097/111] chore(python): update unittest workflow template (#523) Source-Link: https://github.com/googleapis/synthtool/commit/e6f91eb4db419b02af74197905b99fa00a6030c0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- .github/workflows/unittest.yml | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 6d064ddb..f8bd8149 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94bb690db96e6242b2567a4860a94d48fa48696d092e51b0884a1a2c0a79a407 -# created: 2024-07-31T14:52:44.926548819Z + digest: sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 +# created: 2024-09-04T14:50:52.658171431Z diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index f4a337c4..dd8bd769 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -30,6 +30,7 @@ jobs: with: name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} + include-hidden-files: true cover: runs-on: ubuntu-latest From 664e9d502d6a58504acbf02935afc4a2a7571574 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 17 Sep 2024 11:12:48 -0400 Subject: [PATCH 098/111] build(python): release script update (#526) Source-Link: https://github.com/googleapis/synthtool/commit/71a72973dddbc66ea64073b53eda49f0d22e0942 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 4 ++-- .kokoro/release.sh | 2 +- .kokoro/release/common.cfg | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f8bd8149..597e0c32 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 -# created: 2024-09-04T14:50:52.658171431Z + digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 +# created: 2024-09-16T21:04:09.091105552Z diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 036f2480..0b483792 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") cd github/python-error-reporting python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 5c1ce9b6..346d4892 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } From 5fcbcbb64fa9b4c66eeedeaf5b63630e8f65a454 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 10:49:35 -0400 Subject: [PATCH 099/111] chore(main): release 1.11.1 (#503) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 15 +++++++++++++++ google/cloud/error_reporting/gapic_version.py | 2 +- .../cloud/errorreporting_v1beta1/gapic_version.py | 2 +- ...ogle.devtools.clouderrorreporting.v1beta1.json | 2 +- 5 files changed, 19 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 15df9e06..c42d93fc 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.11.0" + ".": "1.11.1" } diff --git a/CHANGELOG.md b/CHANGELOG.md index e4b2e395..d3385b60 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## [1.11.1](https://github.com/googleapis/python-error-reporting/compare/v1.11.0...v1.11.1) (2024-09-17) + + +### Bug Fixes + +* Allow Protobuf 5.x ([#507](https://github.com/googleapis/python-error-reporting/issues/507)) ([8d33168](https://github.com/googleapis/python-error-reporting/commit/8d3316866c0825911b26f17fd4f703cabbc3c396)) +* Retry and timeout values do not propagate in requests during pagination ([#518](https://github.com/googleapis/python-error-reporting/issues/518)) ([cbe41fd](https://github.com/googleapis/python-error-reporting/commit/cbe41fd1fa4edbb1c574cd2c07dc99d4415c3078)) + + +### Documentation + +* Add summary_overview template ([#496](https://github.com/googleapis/python-error-reporting/issues/496)) ([988def5](https://github.com/googleapis/python-error-reporting/commit/988def551df3f95577519450470a7cbf5f3b6b7d)) +* Removes references as a "global-only" service ([1fef616](https://github.com/googleapis/python-error-reporting/commit/1fef61635e3f9d7297f9fe28d18568cf6c2a0fc1)) +* Updates documentation with regional resource names for multiple requests ([1fef616](https://github.com/googleapis/python-error-reporting/commit/1fef61635e3f9d7297f9fe28d18568cf6c2a0fc1)) + ## [1.11.0](https://github.com/googleapis/python-error-reporting/compare/v1.10.0...v1.11.0) (2024-03-26) diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py index 98fb3f58..a034d27a 100644 --- a/google/cloud/error_reporting/gapic_version.py +++ b/google/cloud/error_reporting/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py index 98fb3f58..a034d27a 100644 --- a/google/cloud/errorreporting_v1beta1/gapic_version.py +++ b/google/cloud/errorreporting_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "1.11.0" # {x-release-please-version} +__version__ = "1.11.1" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index fedd655c..f8a87619 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "0.1.0" + "version": "1.11.1" }, "snippets": [ { From ffd913f89cf587d185a2c6757e8f570c2d06dd08 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 11:58:23 -0500 Subject: [PATCH 100/111] chore(python): conditionally load credentials in .kokoro/build.sh (#551) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): conditionally load credentials in .kokoro/build.sh Source-Link: https://github.com/googleapis/synthtool/commit/aa69fb74717c8f4c58c60f8cc101d3f4b2c07b09 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert * remove post processing for release.cfg which no longer exists in templates * add constraints file for Python 3.13 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 6 +- .github/release-trigger.yml | 1 + .github/workflows/docs.yml | 2 +- .github/workflows/unittest.yml | 7 +- .kokoro/build.sh | 20 +- .kokoro/docker/docs/requirements.in | 1 + .kokoro/docker/docs/requirements.txt | 313 +++++++++++++++++-- .kokoro/docs/common.cfg | 1 + .kokoro/publish-docs.sh | 4 - .kokoro/release/common.cfg | 1 - .kokoro/samples/python3.13/common.cfg | 60 ++++ .kokoro/samples/python3.13/continuous.cfg | 6 + .kokoro/samples/python3.13/periodic-head.cfg | 11 + .kokoro/samples/python3.13/periodic.cfg | 6 + .kokoro/samples/python3.13/presubmit.cfg | 6 + .kokoro/test-samples-impl.sh | 3 +- CONTRIBUTING.rst | 6 +- README.rst | 93 +++++- noxfile.py | 18 +- owlbot.py | 2 +- renovate.json | 2 +- setup.py | 1 + testing/constraints-3.13.txt | 0 23 files changed, 512 insertions(+), 58 
deletions(-) create mode 100644 .kokoro/samples/python3.13/common.cfg create mode 100644 .kokoro/samples/python3.13/continuous.cfg create mode 100644 .kokoro/samples/python3.13/periodic-head.cfg create mode 100644 .kokoro/samples/python3.13/periodic.cfg create mode 100644 .kokoro/samples/python3.13/presubmit.cfg create mode 100644 testing/constraints-3.13.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 597e0c32..3f7634f2 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 -# created: 2024-09-16T21:04:09.091105552Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml index d4ca9418..c1b20096 100644 --- a/.github/release-trigger.yml +++ b/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: python-error-reporting diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 698fbc5c..2833fe98 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index dd8bd769..c66b757c 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -5,10 +5,13 @@ on: name: unittest jobs: unit: - runs-on: ubuntu-latest + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. + # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + runs-on: ubuntu-22.04 strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 0c44d59c..d41b45aa 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-error-reporting" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. 
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -46,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in index 816817c6..586bd070 100644 --- a/.kokoro/docker/docs/requirements.in +++ b/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt index 7129c771..a9360a25 100644 --- a/.kokoro/docker/docs/requirements.txt +++ b/.kokoro/docker/docs/requirements.txt @@ -1,42 +1,297 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via nox -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + 
--hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + 
--hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader +colorlog==6.9.0 \ + --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ + --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 
+ # via + # gcp-docuploader + # nox +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + 
--hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via nox -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + 
--hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + 
--hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index eade26cf..b53153fe 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -65,6 +65,7 @@ before_action { } } + ############################################# # this section merged from .kokoro/common_env_vars.cfg using owlbot.py diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 233205d5..4ed4aaf1 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - # build docs nox -s docs diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 346d4892..e0de520f 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -66,4 +66,3 @@ env_vars: { } ################################################### - diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 00000000..e83c889e --- /dev/null +++ b/.kokoro/samples/python3.13/common.cfg @@ -0,0 +1,60 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-error-reporting/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" + + +############################################# +# this section merged from .kokoro/common_env_vars.cfg using owlbot.py + +env_vars: { + key: "PRODUCT_AREA_LABEL" + value: "observability" +} +env_vars: { + key: "PRODUCT_LABEL" + value: "error-reporting" +} +env_vars: { + key: "LANGUAGE_LABEL" + value: "python" +} + +################################################### + diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 00000000..0ab001ca --- /dev/null +++ b/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-error-reporting/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 00000000..71cd1e59 --- /dev/null +++ b/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 55910c8b..53e365bc 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.9 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 5fc13e52..5307292a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.12 -- -k + $ nox -s unit-3.13 -- -k .. note:: @@ -227,6 +227,7 @@ We support: - `Python 3.10`_ - `Python 3.11`_ - `Python 3.12`_ +- `Python 3.13`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ @@ -234,6 +235,7 @@ We support: .. _Python 3.10: https://docs.python.org/3.10/ .. 
_Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/README.rst b/README.rst index 445767c5..484923c1 100644 --- a/README.rst +++ b/README.rst @@ -26,12 +26,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ 3. `Enable the Error Reporting API.`_ -4. `Setup Authentication.`_ +4. `Set up Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Error Reporting API.: https://cloud.google.com/error-reporting -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html +.. _Set up Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ @@ -106,3 +106,92 @@ Next Steps .. _Error Reporting API Product documentation: https://cloud.google.com/error-reporting .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst + +Logging +------- + +This library uses the standard Python :code:`logging` functionality to log some RPC events that could be of interest for debugging and monitoring purposes. +Note the following: + +#. Logs may contain sensitive information. Take care to **restrict access to the logs** if they are saved, whether it be on local storage or on Google Cloud Logging. +#. Google may refine the occurrence, level, and content of various log messages in this library without flagging such changes as breaking. **Do not depend on immutability of the logging events**. +#. By default, the logging events from this library are not handled. You must **explicitly configure log handling** using one of the mechanisms below. + +Simple, environment-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To enable logging for this library without any changes in your code, set the :code:`GOOGLE_SDK_PYTHON_LOGGING_SCOPE` environment variable to a valid Google +logging scope. This configures handling of logging events (at level :code:`logging.DEBUG` or higher) from this library in a default manner, emitting the logged +messages in a structured format. It does not currently allow customizing the logging levels captured nor the handlers, formatters, etc. used for any logging +event. + +A logging scope is a period-separated namespace that begins with :code:`google`, identifying the Python module or package to log. + +- Valid logging scopes: :code:`google`, :code:`google.cloud.asset.v1`, :code:`google.api`, :code:`google.auth`, etc. +- Invalid logging scopes: :code:`foo`, :code:`123`, etc. + +**NOTE**: If the logging scope is invalid, the library does not set up any logging handlers. + +Environment-Based Examples +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +- Enabling the default handler for all Google-based loggers + +.. code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google + +- Enabling the default handler for a specific Google module (for a client library called :code:`library_v1`): + +.. 
code-block:: console + + export GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.library_v1 + + +Advanced, code-based configuration +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +You can also configure a valid logging scope using Python's standard `logging` mechanism. + +Code-Based Examples +^^^^^^^^^^^^^^^^^^^ + +- Configuring a handler for all Google-based loggers + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +- Configuring a handler for a specific Google module (for a client library called :code:`library_v1`): + +.. code-block:: python + + import logging + + from google.cloud.translate_v3 import translate + + base_logger = logging.getLogger("google.cloud.library_v1") + base_logger.addHandler(logging.StreamHandler()) + base_logger.setLevel(logging.DEBUG) + +Logging details +~~~~~~~~~~~~~~~ + +#. Regardless of which of the mechanisms above you use to configure logging for this library, by default logging events are not propagated up to the root + logger from the `google`-level logger. If you need the events to be propagated to the root logger, you must explicitly set + :code:`logging.getLogger("google").propagate = True` in your code. +#. You can mix the different logging configurations above for different Google modules. For example, you may want use a code-based logging configuration for + one library, but decide you need to also set up environment-based logging configuration for another library. + + #. If you attempt to use both code-based and environment-based configuration for the same module, the environment-based configuration will be ineffectual + if the code -based configuration gets applied first. + +#. The Google-specific logging configurations (default handlers for environment-based configuration; not propagating logging events to the root logger) get + executed the first time *any* client library is instantiated in your application, and only if the affected loggers have not been previously configured. + (This is the reason for 2.i. above.) diff --git a/noxfile.py b/noxfile.py index 7540caad..bc66951c 100644 --- a/noxfile.py +++ b/noxfile.py @@ -34,7 +34,15 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -62,7 +70,6 @@ CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() -# 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", "system", @@ -71,6 +78,7 @@ "lint_setup_py", "blacken", "docs", + "docfx", "format", ] @@ -167,7 +175,7 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
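The README section in the hunk above documents both the environment-based and the code-based ways to turn on this library's debug logging. As a consolidated, minimal sketch for this particular package (the scope name ``google.cloud.errorreporting_v1beta1`` is assumed from this repository's package; the README's own examples use the generic ``library_v1`` placeholder):

.. code-block:: python

    import logging

    # Code-based configuration: attach a handler to this library's logger and
    # lower its level so DEBUG events (e.g. RPC request/response logs) are emitted.
    lib_logger = logging.getLogger("google.cloud.errorreporting_v1beta1")
    lib_logger.addHandler(logging.StreamHandler())
    lib_logger.setLevel(logging.DEBUG)

    # Optional: by default events stop at the "google" logger; set propagate=True
    # if you also want them to reach the root logger's handlers.
    logging.getLogger("google").propagate = True

A roughly equivalent setup, without code changes, is exporting ``GOOGLE_SDK_PYTHON_LOGGING_SCOPE=google.cloud.errorreporting_v1beta1`` (or simply ``google``) before starting the process, which installs the default structured-logging handler described above.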
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -367,7 +375,7 @@ def docfx(session): ) -@nox.session(python="3.12") +@nox.session(python="3.13") @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -375,7 +383,7 @@ def docfx(session): def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/owlbot.py b/owlbot.py index ded8fbe7..47e52719 100644 --- a/owlbot.py +++ b/owlbot.py @@ -79,7 +79,7 @@ # -------------------------------------------------------------------------- # add shared environment variables to test configs -tracked_subdirs = ["continuous", "presubmit", "release", "samples", "docs"] +tracked_subdirs = ["continuous", "presubmit", "samples", "docs"] for subdir in tracked_subdirs: for path, subdirs, files in os.walk(f".kokoro/{subdir}"): for name in files: diff --git a/renovate.json b/renovate.json index 39b2a0ec..c7875c46 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } diff --git a/setup.py b/setup.py index 88311343..d3f038a7 100644 --- a/setup.py +++ b/setup.py @@ -82,6 +82,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt new file mode 100644 index 00000000..e69de29b From 1d120c77c73b566796b32cab017a0d4cdfa28713 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 13:29:28 -0500 Subject: [PATCH 101/111] chore: Update gapic-generator-python to v1.23.2 (#529) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: remove body selector from http rule PiperOrigin-RevId: 693215877 Source-Link: https://github.com/googleapis/googleapis/commit/bb6b53e326ce2db403d18be7158c265e07948920 Source-Link: https://github.com/googleapis/googleapis-gen/commit/db8b5a93484ad44055b2bacc4c7cf87e970fe0ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGI4YjVhOTM0ODRhZDQ0MDU1YjJiYWNjNGM3Y2Y4N2U5NzBmZTBlZCJ9 chore: Configure Ruby clients for google-ads-ad_manager PiperOrigin-RevId: 689139590 Source-Link: https://github.com/googleapis/googleapis/commit/296f2ac1aa9abccb7708b639b7839faa1809087f Source-Link: https://github.com/googleapis/googleapis-gen/commit/26927362e0aa1293258fc23fe3ce83c5c21d5fbb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjY5MjczNjJlMGFhMTI5MzI1OGZjMjNmZTNjZTgzYzVjMjFkNWZiYiJ9 chore: Update gapic-generator-python to v1.19.1 
PiperOrigin-RevId: 684571179 Source-Link: https://github.com/googleapis/googleapis/commit/fbdc238931e0a7a95c0f55e0cd3ad9e3de2535c8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/3a2cdcfb80c2d0f5ec0cc663c2bab0a9486229d0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiM2EyY2RjZmI4MGMyZDBmNWVjMGNjNjYzYzJiYWIwYTk0ODYyMjlkMCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add support for opt-in debug logging fix: Fix typing issue with gRPC metadata when key ends in -bin chore: Update gapic-generator-python to v1.21.0 PiperOrigin-RevId: 705285820 Source-Link: https://github.com/googleapis/googleapis/commit/f9b8b9150f7fcd600b0acaeef91236b1843f5e49 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca1e0a1e472d6e6f5de883a5cb54724f112ce348 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2ExZTBhMWU0NzJkNmU2ZjVkZTg4M2E1Y2I1NDcyNGYxMTJjZTM0OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: Add REST Interceptors which support reading metadata feat: Add support for reading selective GAPIC generation methods from service YAML chore: Update gapic-generator-python to v1.22.0 PiperOrigin-RevId: 724026024 Source-Link: https://github.com/googleapis/googleapis/commit/ad9963857109513e77eed153a66264481789109f Source-Link: https://github.com/googleapis/googleapis-gen/commit/e291c4dd1d670eda19998de76f967e1603a48993 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTI5MWM0ZGQxZDY3MGVkYTE5OTk4ZGU3NmY5NjdlMTYwM2E0ODk5MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.23.2 PiperOrigin-RevId: 732281673 Source-Link: https://github.com/googleapis/googleapis/commit/2f37e0ad56637325b24f8603284ccb6f05796f9a Source-Link: https://github.com/googleapis/googleapis-gen/commit/016b7538ba5a798f2ae423d4ccd7f82b06cdf6d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDE2Yjc1MzhiYTVhNzk4ZjJhZTQyM2Q0Y2NkN2Y4MmIwNmNkZjZkMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../error_group_service/async_client.py | 58 +- .../services/error_group_service/client.py | 130 +- .../error_group_service/transports/README.rst | 9 + .../error_group_service/transports/grpc.py | 94 +- .../transports/grpc_asyncio.py | 110 +- .../error_group_service/transports/rest.py | 409 +++-- .../transports/rest_base.py | 207 +++ .../error_stats_service/async_client.py | 71 +- .../services/error_stats_service/client.py | 143 +- .../services/error_stats_service/pagers.py | 32 +- .../error_stats_service/transports/README.rst | 9 + .../error_stats_service/transports/grpc.py | 96 +- .../transports/grpc_asyncio.py | 114 +- .../error_stats_service/transports/rest.py | 591 +++++-- .../transports/rest_base.py | 248 +++ .../report_errors_service/async_client.py | 45 +- .../services/report_errors_service/client.py | 117 +- .../transports/README.rst | 9 + .../report_errors_service/transports/grpc.py | 92 +- .../transports/grpc_asyncio.py | 106 +- .../report_errors_service/transports/rest.py | 231 ++- .../transports/rest_base.py | 150 ++ ....devtools.clouderrorreporting.v1beta1.json | 26 +- testing/constraints-3.13.txt | 6 + 
.../test_error_group_service.py | 1351 ++++++++------- .../test_error_stats_service.py | 1491 +++++++++-------- .../test_report_errors_service.py | 813 ++++----- 27 files changed, 4482 insertions(+), 2276 deletions(-) create mode 100644 google/cloud/errorreporting_v1beta1/services/error_group_service/transports/README.rst create mode 100644 google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py create mode 100644 google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/README.rst create mode 100644 google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py create mode 100644 google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/README.rst create mode 100644 google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index 21361c25..c4c8feae 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -49,6 +50,15 @@ from .transports.grpc_asyncio import ErrorGroupServiceGrpcAsyncIOTransport from .client import ErrorGroupServiceClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class ErrorGroupServiceAsyncClient: """Service for retrieving and updating individual error groups.""" @@ -260,6 +270,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceAsyncClient`.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "credentialsType": None, + }, + ) + async def get_group( self, request: Optional[Union[error_group_service.GetGroupRequest, dict]] = None, @@ -267,7 +299,7 @@ async def get_group( group_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.ErrorGroup: r"""Get the specified group. @@ -329,8 +361,10 @@ async def sample_get_group(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.ErrorGroup: @@ -341,7 +375,10 @@ async def sample_get_group(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([group_name]) + flattened_params = [group_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -393,7 +430,7 @@ async def update_group( group: Optional[common.ErrorGroup] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.ErrorGroup: r"""Replace the data for the specified group. Fails if the group does not exist. @@ -437,8 +474,10 @@ async def sample_update_group(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.ErrorGroup: @@ -449,7 +488,10 @@ async def sample_update_group(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([group]) + flattened_params = [group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index 97cb9157..d4b2a29b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -14,6 +14,9 @@ # limitations under the License. 
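The docstring change above (repeated for every RPC in this patch) widens the per-call ``metadata`` type from ``Sequence[Tuple[str, str]]`` to ``Sequence[Tuple[str, Union[str, bytes]]]``: values stay ``str`` for ordinary keys, but keys ending in ``-bin`` must carry ``bytes``. A hedged sketch of a call that uses both (the header names, resource name, and byte payload are illustrative placeholders, not values the API requires):

.. code-block:: python

    from google.cloud.errorreporting_v1beta1.services.error_group_service import (
        ErrorGroupServiceClient,
    )

    # Assumes Application Default Credentials are available in the environment.
    client = ErrorGroupServiceClient()

    group = client.get_group(
        group_name="projects/my-project/groups/my-group-id",  # placeholder resource name
        metadata=[
            ("x-custom-routing", "plain string value"),  # ordinary key -> str value
            ("x-custom-trace-bin", b"\x01\x02\x03"),     # "-bin" suffix -> bytes value
        ],
    )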
# from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_group_service from .transports.base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO @@ -455,52 +467,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = ErrorGroupServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ErrorGroupServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -610,6 +615,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -676,6 +685,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient`.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "credentialsType": None, + }, + ) + def get_group( self, request: Optional[Union[error_group_service.GetGroupRequest, dict]] = None, @@ -683,7 +715,7 @@ def get_group( group_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.ErrorGroup: r"""Get the specified group. @@ -745,8 +777,10 @@ def sample_get_group(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.ErrorGroup: @@ -757,7 +791,10 @@ def sample_get_group(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([group_name]) + flattened_params = [group_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -806,7 +843,7 @@ def update_group( group: Optional[common.ErrorGroup] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.ErrorGroup: r"""Replace the data for the specified group. Fails if the group does not exist. @@ -850,8 +887,10 @@ def sample_update_group(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.ErrorGroup: @@ -862,7 +901,10 @@ def sample_update_group(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([group]) + flattened_params = [group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/README.rst b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/README.rst new file mode 100644 index 00000000..a0b01808 --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ErrorGroupServiceTransport` is the ABC for all transports. +- public child `ErrorGroupServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ErrorGroupServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseErrorGroupServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ErrorGroupServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index 793b005b..3d24d3eb 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,13 +24,91 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_group_service from .base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ErrorGroupServiceGrpcTransport(ErrorGroupServiceTransport): """gRPC backend transport for ErrorGroupService. @@ -181,7 +262,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -255,7 +341,7 @@ def get_group( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_group" not in self._stubs: - self._stubs["get_group"] = self.grpc_channel.unary_unary( + self._stubs["get_group"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup", request_serializer=error_group_service.GetGroupRequest.serialize, response_deserializer=common.ErrorGroup.deserialize, @@ -282,7 +368,7 @@ def update_group( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_group" not in self._stubs: - self._stubs["update_group"] = self.grpc_channel.unary_unary( + self._stubs["update_group"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup", request_serializer=error_group_service.UpdateGroupRequest.serialize, response_deserializer=common.ErrorGroup.deserialize, @@ -290,7 +376,7 @@ def update_group( return self._stubs["update_group"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def kind(self) -> str: diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py index 87365094..24e5f0eb 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.errorreporting_v1beta1.types import common @@ -31,6 +38,82 @@ from .base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO from .grpc import ErrorGroupServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ErrorGroupServiceGrpcAsyncIOTransport(ErrorGroupServiceTransport): """gRPC AsyncIO backend transport for ErrorGroupService. @@ -227,7 +310,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -259,7 +348,7 @@ def get_group( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_group" not in self._stubs: - self._stubs["get_group"] = self.grpc_channel.unary_unary( + self._stubs["get_group"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/GetGroup", request_serializer=error_group_service.GetGroupRequest.serialize, response_deserializer=common.ErrorGroup.deserialize, @@ -288,7 +377,7 @@ def update_group( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_group" not in self._stubs: - self._stubs["update_group"] = self.grpc_channel.unary_unary( + self._stubs["update_group"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorGroupService/UpdateGroup", request_serializer=error_group_service.UpdateGroupRequest.serialize, response_deserializer=common.ErrorGroup.deserialize, @@ -298,20 +387,29 @@ def update_group( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.get_group: gapic_v1.method_async.wrap_method( + self.get_group: self._wrap_method( self.get_group, default_timeout=None, client_info=client_info, ), - self.update_group: gapic_v1.method_async.wrap_method( + self.update_group: self._wrap_method( self.update_group, default_timeout=None, client_info=client_info, ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" __all__ = ("ErrorGroupServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py index 900d875a..80bba4e5 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py @@ -13,45 +13,50 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings + +from google.cloud.errorreporting_v1beta1.types import common +from google.cloud.errorreporting_v1beta1.types import error_group_service + + +from .rest_base import _BaseErrorGroupServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore -from google.cloud.errorreporting_v1beta1.types import common -from google.cloud.errorreporting_v1beta1.types import error_group_service - -from .base import ( - ErrorGroupServiceTransport, - DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, -) + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -95,8 +100,10 @@ def post_update_group(self, response): def pre_get_group( self, request: error_group_service.GetGroupRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[error_group_service.GetGroupRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_group_service.GetGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_group Override in a subclass to manipulate the request or metadata @@ -107,17 +114,42 @@ def pre_get_group( def post_get_group(self, response: common.ErrorGroup) -> common.ErrorGroup: """Post-rpc interceptor for get_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ErrorGroupService server but before - it is returned to user code. + it is returned to user code. This `post_get_group` interceptor runs + before the `post_get_group_with_metadata` interceptor. """ return response + def post_get_group_with_metadata( + self, + response: common.ErrorGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.ErrorGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ErrorGroupService server but before it is returned to user code. 
+ + We recommend only using this `post_get_group_with_metadata` + interceptor in new development instead of the `post_get_group` interceptor. + When both interceptors are used, this `post_get_group_with_metadata` interceptor runs after the + `post_get_group` interceptor. The (possibly modified) response returned by + `post_get_group` will be passed to + `post_get_group_with_metadata`. + """ + return response, metadata + def pre_update_group( self, request: error_group_service.UpdateGroupRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[error_group_service.UpdateGroupRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_group_service.UpdateGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_group Override in a subclass to manipulate the request or metadata @@ -128,12 +160,35 @@ def pre_update_group( def post_update_group(self, response: common.ErrorGroup) -> common.ErrorGroup: """Post-rpc interceptor for update_group - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ErrorGroupService server but before - it is returned to user code. + it is returned to user code. This `post_update_group` interceptor runs + before the `post_update_group_with_metadata` interceptor. """ return response + def post_update_group_with_metadata( + self, + response: common.ErrorGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[common.ErrorGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ErrorGroupService server but before it is returned to user code. + + We recommend only using this `post_update_group_with_metadata` + interceptor in new development instead of the `post_update_group` interceptor. + When both interceptors are used, this `post_update_group_with_metadata` interceptor runs after the + `post_update_group` interceptor. The (possibly modified) response returned by + `post_update_group` will be passed to + `post_update_group_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ErrorGroupServiceRestStub: @@ -142,8 +197,8 @@ class ErrorGroupServiceRestStub: _interceptor: ErrorGroupServiceRestInterceptor -class ErrorGroupServiceRestTransport(ErrorGroupServiceTransport): - """REST backend transport for ErrorGroupService. +class ErrorGroupServiceRestTransport(_BaseErrorGroupServiceRestTransport): + """REST backend synchronous transport for ErrorGroupService. Service for retrieving and updating individual error groups. @@ -152,7 +207,6 @@ class ErrorGroupServiceRestTransport(ErrorGroupServiceTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -206,21 +260,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
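Note: a minimal sketch of how the new `post_get_group_with_metadata` hook could be overridden. The subclass name and header handling below are illustrative; the interceptor class, transport class, and constructor arguments come from the generated code in this patch.

from typing import Sequence, Tuple, Union

from google.auth.credentials import AnonymousCredentials
from google.cloud import errorreporting_v1beta1
from google.cloud.errorreporting_v1beta1.services.error_group_service.transports.rest import (
    ErrorGroupServiceRestInterceptor,
    ErrorGroupServiceRestTransport,
)
from google.cloud.errorreporting_v1beta1.types import common


class HeaderLoggingInterceptor(ErrorGroupServiceRestInterceptor):
    """Hypothetical subclass that inspects response headers via the new hook."""

    def post_get_group_with_metadata(
        self,
        response: common.ErrorGroup,
        metadata: Sequence[Tuple[str, Union[str, bytes]]],
    ) -> Tuple[common.ErrorGroup, Sequence[Tuple[str, Union[str, bytes]]]]:
        # `metadata` carries the HTTP response headers as (key, value) pairs.
        for key, value in metadata:
            print(f"response header {key}: {value!r}")
        return response, metadata


# AnonymousCredentials keeps the sketch self-contained (no ADC lookup).
transport = ErrorGroupServiceRestTransport(
    credentials=AnonymousCredentials(),
    interceptor=HeaderLoggingInterceptor(),
)
client = errorreporting_v1beta1.ErrorGroupServiceClient(transport=transport)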
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -231,19 +276,33 @@ def __init__( self._interceptor = interceptor or ErrorGroupServiceRestInterceptor() self._prep_wrapped_messages(client_info) - class _GetGroup(ErrorGroupServiceRestStub): + class _GetGroup( + _BaseErrorGroupServiceRestTransport._BaseGetGroup, ErrorGroupServiceRestStub + ): def __hash__(self): - return hash("GetGroup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("ErrorGroupServiceRestTransport.GetGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -251,7 +310,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.ErrorGroup: r"""Call the get group method over HTTP. @@ -262,8 +321,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
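Note: the widened `metadata` annotation documented just above admits plain string pairs and, for keys ending in `-bin`, bytes values. A hedged sketch of what that looks like from the caller's side; the project, group, and header names are placeholders and application default credentials are assumed:

from google.cloud import errorreporting_v1beta1

client = errorreporting_v1beta1.ErrorGroupServiceClient()

# String-valued metadata, exactly as before the type change.
group = client.get_group(
    group_name="projects/my-project/groups/my-group-id",
    metadata=[("x-goog-request-reason", "debugging")],
)

# Keys with the "-bin" suffix may now carry bytes values, matching
# Sequence[Tuple[str, Union[str, bytes]]].
group = client.get_group(
    group_name="projects/my-project/groups/my-group-id",
    metadata=[("x-custom-trace-bin", b"\x00\x01\x02")],
)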
Returns: ~.common.ErrorGroup: @@ -272,42 +333,55 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta1/{group_name=projects/*/groups/*}", - }, - { - "method": "get", - "uri": "/v1beta1/{group_name=projects/*/locations/*/groups/*}", - }, - ] - request, metadata = self._interceptor.pre_get_group(request, metadata) - pb_request = error_group_service.GetGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_get_group(request, metadata) + transcoded_request = _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.GetGroup", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": "GetGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = ErrorGroupServiceRestTransport._GetGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -320,22 +394,63 @@ def __call__( pb_resp = common.ErrorGroup.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = common.ErrorGroup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.get_group", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": "GetGroup", + "metadata": http_response["headers"], + 
"httpResponse": http_response, + }, + ) return resp - class _UpdateGroup(ErrorGroupServiceRestStub): + class _UpdateGroup( + _BaseErrorGroupServiceRestTransport._BaseUpdateGroup, ErrorGroupServiceRestStub + ): def __hash__(self): - return hash("UpdateGroup") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("ErrorGroupServiceRestTransport.UpdateGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -343,7 +458,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> common.ErrorGroup: r"""Call the update group method over HTTP. @@ -354,8 +469,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.common.ErrorGroup: @@ -364,50 +481,60 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "put", - "uri": "/v1beta1/{group.name=projects/*/groups/*}", - "body": "group", - }, - { - "method": "put", - "uri": "/v1beta1/{group.name=projects/*/locations/*/groups/*}", - "body": "group", - }, - ] - request, metadata = self._interceptor.pre_update_group(request, metadata) - pb_request = error_group_service.UpdateGroupRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_http_options() + ) - # Jsonify the request body + request, metadata = self._interceptor.pre_update_group(request, metadata) + transcoded_request = _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.UpdateGroup", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": "UpdateGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = ErrorGroupServiceRestTransport._UpdateGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -420,7 +547,33 @@ def __call__( pb_resp = common.ErrorGroup.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = common.ErrorGroup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + 
"status": response.status_code, + } + _LOGGER.debug( + "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorGroupServiceClient.update_group", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", + "rpcName": "UpdateGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py new file mode 100644 index 00000000..0668d9ac --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py @@ -0,0 +1,207 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import ErrorGroupServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.errorreporting_v1beta1.types import common +from google.cloud.errorreporting_v1beta1.types import error_group_service + + +class _BaseErrorGroupServiceRestTransport(ErrorGroupServiceTransport): + """Base REST backend transport for ErrorGroupService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "clouderrorreporting.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseGetGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{group_name=projects/*/groups/*}", + }, + { + "method": "get", + "uri": "/v1beta1/{group_name=projects/*/locations/*/groups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = error_group_service.GetGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseErrorGroupServiceRestTransport._BaseGetGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1beta1/{group.name=projects/*/groups/*}", + "body": "group", + }, + { + "method": "put", + "uri": "/v1beta1/{group.name=projects/*/locations/*/groups/*}", + "body": "group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = error_group_service.UpdateGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseErrorGroupServiceRestTransport._BaseUpdateGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__ = ("_BaseErrorGroupServiceRestTransport",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py 
b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 232049f7..3b804ae4 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -50,6 +51,15 @@ from .transports.grpc_asyncio import ErrorStatsServiceGrpcAsyncIOTransport from .client import ErrorStatsServiceClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class ErrorStatsServiceAsyncClient: """An API for retrieving and managing error statistics as well @@ -263,6 +273,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceAsyncClient`.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "credentialsType": None, + }, + ) + async def list_group_stats( self, request: Optional[ @@ -273,7 +305,7 @@ async def list_group_stats( time_range: Optional[error_stats_service.QueryTimeRange] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListGroupStatsAsyncPager: r"""Lists the specified groups. @@ -354,8 +386,10 @@ async def sample_list_group_stats(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsAsyncPager: @@ -369,7 +403,10 @@ async def sample_list_group_stats(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_name, time_range]) + flattened_params = [project_name, time_range] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -435,7 +472,7 @@ async def list_events( group_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListEventsAsyncPager: r"""Lists the specified events. @@ -504,8 +541,10 @@ async def sample_list_events(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsAsyncPager: @@ -519,7 +558,10 @@ async def sample_list_events(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_name, group_id]) + flattened_params = [project_name, group_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -584,7 +626,7 @@ async def delete_events( project_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> error_stats_service.DeleteEventsResponse: r"""Deletes all error events of a given project. @@ -638,8 +680,10 @@ async def sample_delete_events(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse: @@ -650,7 +694,10 @@ async def sample_delete_events(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_name]) + flattened_params = [project_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 2f128d1a..6f67b822 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -14,6 +14,9 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.cloud.errorreporting_v1beta1.services.error_stats_service import pagers from google.cloud.errorreporting_v1beta1.types import common from google.cloud.errorreporting_v1beta1.types import error_stats_service @@ -458,52 +470,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. """ - default_universe = ErrorStatsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ErrorStatsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -613,6 +618,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -679,6 +688,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient`.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "credentialsType": None, + }, + ) + def list_group_stats( self, request: Optional[ @@ -689,7 +721,7 @@ def list_group_stats( time_range: Optional[error_stats_service.QueryTimeRange] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListGroupStatsPager: r"""Lists the specified groups. @@ -770,8 +802,10 @@ def sample_list_group_stats(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsPager: @@ -785,7 +819,10 @@ def sample_list_group_stats(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project_name, time_range]) + flattened_params = [project_name, time_range] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -848,7 +885,7 @@ def list_events( group_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListEventsPager: r"""Lists the specified events. @@ -917,8 +954,10 @@ def sample_list_events(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsPager: @@ -932,7 +971,10 @@ def sample_list_events(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_name, group_id]) + flattened_params = [project_name, group_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -994,7 +1036,7 @@ def delete_events( project_name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> error_stats_service.DeleteEventsResponse: r"""Deletes all error events of a given project. @@ -1048,8 +1090,10 @@ def sample_delete_events(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse: @@ -1060,7 +1104,10 @@ def sample_delete_events(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
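Note: the `_add_cred_info_for_auth_errors` helper added above appends a JSON description of the active credential to 401/403/404 errors when google-auth >= 2.35.0 provides `get_cred_info`. A sketch of where that surfaces for a caller; the project name is a placeholder and default credentials are assumed:

from google.api_core import exceptions as core_exceptions
from google.cloud import errorreporting_v1beta1

client = errorreporting_v1beta1.ErrorStatsServiceClient()

try:
    client.delete_events(project_name="projects/my-project")
except core_exceptions.Forbidden as exc:
    # With a recent google-auth, exc.details may now include a JSON string
    # describing the credential that was rejected.
    print(exc.details)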
- has_flattened_params = any([project_name]) + flattened_params = [project_name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py index 274f7ac2..277a4112 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py @@ -67,7 +67,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -81,8 +81,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = error_stats_service.ListGroupStatsRequest(request) @@ -141,7 +143,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -155,8 +157,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = error_stats_service.ListGroupStatsRequest(request) @@ -219,7 +223,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -233,8 +237,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" self._method = method self._request = error_stats_service.ListEventsRequest(request) @@ -293,7 +299,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -307,8 +313,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = error_stats_service.ListEventsRequest(request) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/README.rst b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/README.rst new file mode 100644 index 00000000..9fb4cf06 --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ErrorStatsServiceTransport` is the ABC for all transports. +- public child `ErrorStatsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ErrorStatsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseErrorStatsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ErrorStatsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index b1f71dd3..08a27874 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,12 +24,90 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.errorreporting_v1beta1.types import error_stats_service from .base import ErrorStatsServiceTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ErrorStatsServiceGrpcTransport(ErrorStatsServiceTransport): """gRPC backend transport for ErrorStatsService. @@ -181,7 +262,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -258,7 +344,7 @@ def list_group_stats( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_group_stats" not in self._stubs: - self._stubs["list_group_stats"] = self.grpc_channel.unary_unary( + self._stubs["list_group_stats"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListGroupStats", request_serializer=error_stats_service.ListGroupStatsRequest.serialize, response_deserializer=error_stats_service.ListGroupStatsResponse.deserialize, @@ -286,7 +372,7 @@ def list_events( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_events" not in self._stubs: - self._stubs["list_events"] = self.grpc_channel.unary_unary( + self._stubs["list_events"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListEvents", request_serializer=error_stats_service.ListEventsRequest.serialize, response_deserializer=error_stats_service.ListEventsResponse.deserialize, @@ -315,7 +401,7 @@ def delete_events( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_events" not in self._stubs: - self._stubs["delete_events"] = self.grpc_channel.unary_unary( + self._stubs["delete_events"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/DeleteEvents", request_serializer=error_stats_service.DeleteEventsRequest.serialize, response_deserializer=error_stats_service.DeleteEventsResponse.deserialize, @@ -323,7 +409,7 @@ def delete_events( return self._stubs["delete_events"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def kind(self) -> str: diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py index 895a1295..7085b89b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
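Note: the transports README.rst earlier in this patch describes the layering (base ABC, sync gRPC above, async gRPC below, and the REST pair). Callers normally let the client choose, but a concrete transport can be requested by name. A sketch, assuming the installed version ships the REST transport; AnonymousCredentials keeps it self-contained:

from google.auth.credentials import AnonymousCredentials
from google.cloud import errorreporting_v1beta1

# "grpc" is the default for the sync client; "rest" selects the synchronous
# HTTP/1.1 transport, and the async client uses "grpc_asyncio".
client = errorreporting_v1beta1.ErrorStatsServiceClient(
    credentials=AnonymousCredentials(),
    transport="rest",
)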
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +26,93 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.errorreporting_v1beta1.types import error_stats_service from .base import ErrorStatsServiceTransport, DEFAULT_CLIENT_INFO from .grpc import ErrorStatsServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ErrorStatsServiceGrpcAsyncIOTransport(ErrorStatsServiceTransport): """gRPC AsyncIO backend transport for ErrorStatsService. @@ -227,7 +310,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -262,7 +351,7 @@ def list_group_stats( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_group_stats" not in self._stubs: - self._stubs["list_group_stats"] = self.grpc_channel.unary_unary( + self._stubs["list_group_stats"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListGroupStats", request_serializer=error_stats_service.ListGroupStatsRequest.serialize, response_deserializer=error_stats_service.ListGroupStatsResponse.deserialize, @@ -291,7 +380,7 @@ def list_events( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_events" not in self._stubs: - self._stubs["list_events"] = self.grpc_channel.unary_unary( + self._stubs["list_events"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/ListEvents", request_serializer=error_stats_service.ListEventsRequest.serialize, response_deserializer=error_stats_service.ListEventsResponse.deserialize, @@ -320,7 +409,7 @@ def delete_events( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_events" not in self._stubs: - self._stubs["delete_events"] = self.grpc_channel.unary_unary( + self._stubs["delete_events"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ErrorStatsService/DeleteEvents", request_serializer=error_stats_service.DeleteEventsRequest.serialize, response_deserializer=error_stats_service.DeleteEventsResponse.deserialize, @@ -330,25 +419,34 @@ def delete_events( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.list_group_stats: gapic_v1.method_async.wrap_method( + self.list_group_stats: self._wrap_method( self.list_group_stats, default_timeout=None, client_info=client_info, ), - self.list_events: gapic_v1.method_async.wrap_method( + self.list_events: self._wrap_method( self.list_events, default_timeout=None, client_info=client_info, ), - self.delete_events: gapic_v1.method_async.wrap_method( + self.delete_events: self._wrap_method( self.delete_events, default_timeout=None, client_info=client_info, ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" __all__ = ("ErrorStatsServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py index f7cbbf8f..11a6cdd9 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py +++ 
b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py @@ -13,44 +13,49 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings + +from google.cloud.errorreporting_v1beta1.types import error_stats_service + + +from .rest_base import _BaseErrorStatsServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore -from google.cloud.errorreporting_v1beta1.types import error_stats_service - -from .base import ( - ErrorStatsServiceTransport, - DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, -) + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -102,8 +107,10 @@ def post_list_group_stats(self, response): def pre_delete_events( self, request: error_stats_service.DeleteEventsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[error_stats_service.DeleteEventsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_stats_service.DeleteEventsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_events Override in a subclass to manipulate the request or metadata @@ -116,17 +123,45 @@ def post_delete_events( ) -> error_stats_service.DeleteEventsResponse: """Post-rpc interceptor for delete_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ErrorStatsService server but before - it is returned to user code. + it is returned to user code. This `post_delete_events` interceptor runs + before the `post_delete_events_with_metadata` interceptor. 
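A minimal sketch of how a subclass might adopt the new hook alongside the
old one (`ErrorStatsServiceRestInterceptor` is defined in this file; the
`MyInterceptor` name and the print are illustrative only):

    class MyInterceptor(ErrorStatsServiceRestInterceptor):
        def post_delete_events(self, response):
            # Runs first, kept for backward compatibility.
            return response

        def post_delete_events_with_metadata(self, response, metadata):
            # Runs second and also receives the response headers as metadata.
            print(dict(metadata))
            return response, metadata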
""" return response + def post_delete_events_with_metadata( + self, + response: error_stats_service.DeleteEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_stats_service.DeleteEventsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for delete_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ErrorStatsService server but before it is returned to user code. + + We recommend only using this `post_delete_events_with_metadata` + interceptor in new development instead of the `post_delete_events` interceptor. + When both interceptors are used, this `post_delete_events_with_metadata` interceptor runs after the + `post_delete_events` interceptor. The (possibly modified) response returned by + `post_delete_events` will be passed to + `post_delete_events_with_metadata`. + """ + return response, metadata + def pre_list_events( self, request: error_stats_service.ListEventsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[error_stats_service.ListEventsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_stats_service.ListEventsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_events Override in a subclass to manipulate the request or metadata @@ -139,17 +174,45 @@ def post_list_events( ) -> error_stats_service.ListEventsResponse: """Post-rpc interceptor for list_events - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_events_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ErrorStatsService server but before - it is returned to user code. + it is returned to user code. This `post_list_events` interceptor runs + before the `post_list_events_with_metadata` interceptor. """ return response + def post_list_events_with_metadata( + self, + response: error_stats_service.ListEventsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_stats_service.ListEventsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_events + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ErrorStatsService server but before it is returned to user code. + + We recommend only using this `post_list_events_with_metadata` + interceptor in new development instead of the `post_list_events` interceptor. + When both interceptors are used, this `post_list_events_with_metadata` interceptor runs after the + `post_list_events` interceptor. The (possibly modified) response returned by + `post_list_events` will be passed to + `post_list_events_with_metadata`. + """ + return response, metadata + def pre_list_group_stats( self, request: error_stats_service.ListGroupStatsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[error_stats_service.ListGroupStatsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_stats_service.ListGroupStatsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for list_group_stats Override in a subclass to manipulate the request or metadata @@ -162,12 +225,38 @@ def post_list_group_stats( ) -> error_stats_service.ListGroupStatsResponse: """Post-rpc interceptor for list_group_stats - Override in a subclass to manipulate the response + DEPRECATED. 
Please use the `post_list_group_stats_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ErrorStatsService server but before - it is returned to user code. + it is returned to user code. This `post_list_group_stats` interceptor runs + before the `post_list_group_stats_with_metadata` interceptor. """ return response + def post_list_group_stats_with_metadata( + self, + response: error_stats_service.ListGroupStatsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + error_stats_service.ListGroupStatsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_group_stats + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ErrorStatsService server but before it is returned to user code. + + We recommend only using this `post_list_group_stats_with_metadata` + interceptor in new development instead of the `post_list_group_stats` interceptor. + When both interceptors are used, this `post_list_group_stats_with_metadata` interceptor runs after the + `post_list_group_stats` interceptor. The (possibly modified) response returned by + `post_list_group_stats` will be passed to + `post_list_group_stats_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ErrorStatsServiceRestStub: @@ -176,8 +265,8 @@ class ErrorStatsServiceRestStub: _interceptor: ErrorStatsServiceRestInterceptor -class ErrorStatsServiceRestTransport(ErrorStatsServiceTransport): - """REST backend transport for ErrorStatsService. +class ErrorStatsServiceRestTransport(_BaseErrorStatsServiceRestTransport): + """REST backend synchronous transport for ErrorStatsService. An API for retrieving and managing error statistics as well as data for individual events. @@ -187,7 +276,6 @@ class ErrorStatsServiceRestTransport(ErrorStatsServiceTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -241,21 +329,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -266,19 +345,33 @@ def __init__( self._interceptor = interceptor or ErrorStatsServiceRestInterceptor() self._prep_wrapped_messages(client_info) - class _DeleteEvents(ErrorStatsServiceRestStub): + class _DeleteEvents( + _BaseErrorStatsServiceRestTransport._BaseDeleteEvents, ErrorStatsServiceRestStub + ): def __hash__(self): - return hash("DeleteEvents") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("ErrorStatsServiceRestTransport.DeleteEvents") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -286,7 +379,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> error_stats_service.DeleteEventsResponse: r"""Call the delete events method over HTTP. @@ -296,8 +389,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
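For illustration, a hypothetical pair of entries:

    metadata = (
        ("x-my-header", "a plain string value"),
        ("x-my-header-bin", b"\x01\x02"),  # `-bin` suffix, so the value is bytes
    )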
Returns: ~.error_stats_service.DeleteEventsResponse: @@ -306,42 +401,55 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1beta1/{project_name=projects/*}/events", - }, - { - "method": "delete", - "uri": "/v1beta1/{project_name=projects/*/locations/*}/events", - }, - ] - request, metadata = self._interceptor.pre_delete_events(request, metadata) - pb_request = error_stats_service.DeleteEventsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_delete_events(request, metadata) + transcoded_request = _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.DeleteEvents", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": "DeleteEvents", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = ErrorStatsServiceRestTransport._DeleteEvents._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -354,24 +462,64 @@ def __call__( pb_resp = error_stats_service.DeleteEventsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_events_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = error_stats_service.DeleteEventsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.delete_events", + extra={ + "serviceName": 
"google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": "DeleteEvents", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListEvents(ErrorStatsServiceRestStub): + class _ListEvents( + _BaseErrorStatsServiceRestTransport._BaseListEvents, ErrorStatsServiceRestStub + ): def __hash__(self): - return hash("ListEvents") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { - "groupId": "", - } - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("ErrorStatsServiceRestTransport.ListEvents") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -379,7 +527,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> error_stats_service.ListEventsResponse: r"""Call the list events method over HTTP. @@ -390,8 +538,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.error_stats_service.ListEventsResponse: @@ -400,42 +550,55 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta1/{project_name=projects/*}/events", - }, - { - "method": "get", - "uri": "/v1beta1/{project_name=projects/*/locations/*}/events", - }, - ] - request, metadata = self._interceptor.pre_list_events(request, metadata) - pb_request = error_stats_service.ListEventsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) + http_options = ( + _BaseErrorStatsServiceRestTransport._BaseListEvents._get_http_options() + ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + request, metadata = self._interceptor.pre_list_events(request, metadata) + transcoded_request = _BaseErrorStatsServiceRestTransport._BaseListEvents._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseErrorStatsServiceRestTransport._BaseListEvents._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.ListEvents", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": "ListEvents", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = ErrorStatsServiceRestTransport._ListEvents._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -448,22 +611,65 @@ def __call__( pb_resp = error_stats_service.ListEventsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_events(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_events_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = error_stats_service.ListEventsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.list_events", + extra={ + "serviceName": 
"google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": "ListEvents", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp - class _ListGroupStats(ErrorStatsServiceRestStub): + class _ListGroupStats( + _BaseErrorStatsServiceRestTransport._BaseListGroupStats, + ErrorStatsServiceRestStub, + ): def __hash__(self): - return hash("ListGroupStats") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("ErrorStatsServiceRestTransport.ListGroupStats") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -471,7 +677,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> error_stats_service.ListGroupStatsResponse: r"""Call the list group stats method over HTTP. @@ -481,8 +687,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.error_stats_service.ListGroupStatsResponse: @@ -491,44 +699,57 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1beta1/{project_name=projects/*}/groupStats", - }, - { - "method": "get", - "uri": "/v1beta1/{project_name=projects/*/locations/*}/groupStats", - }, - ] + http_options = ( + _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_http_options() + ) + request, metadata = self._interceptor.pre_list_group_stats( request, metadata ) - pb_request = error_stats_service.ListGroupStatsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.ListGroupStats", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": "ListGroupStats", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = ErrorStatsServiceRestTransport._ListGroupStats._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -541,7 +762,35 @@ def __call__( pb_resp = error_stats_service.ListGroupStatsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_group_stats(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_group_stats_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + error_stats_service.ListGroupStatsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.devtools.clouderrorreporting_v1beta1.ErrorStatsServiceClient.list_group_stats", + extra={ + "serviceName": 
"google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", + "rpcName": "ListGroupStats", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py new file mode 100644 index 00000000..cd1c2a0f --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import ErrorStatsServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.errorreporting_v1beta1.types import error_stats_service + + +class _BaseErrorStatsServiceRestTransport(ErrorStatsServiceTransport): + """Base REST backend transport for ErrorStatsService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "clouderrorreporting.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseDeleteEvents: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1beta1/{project_name=projects/*}/events", + }, + { + "method": "delete", + "uri": "/v1beta1/{project_name=projects/*/locations/*}/events", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = error_stats_service.DeleteEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseErrorStatsServiceRestTransport._BaseDeleteEvents._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListEvents: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "groupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*}/events", + }, + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*/locations/*}/events", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = error_stats_service.ListEventsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseErrorStatsServiceRestTransport._BaseListEvents._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGroupStats: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): 
+ http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*}/groupStats", + }, + { + "method": "get", + "uri": "/v1beta1/{project_name=projects/*/locations/*}/groupStats", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = error_stats_service.ListGroupStatsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseErrorStatsServiceRestTransport._BaseListGroupStats._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__ = ("_BaseErrorStatsServiceRestTransport",) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 6c0ef660..76e85eed 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -48,6 +49,15 @@ from .transports.grpc_asyncio import ReportErrorsServiceGrpcAsyncIOTransport from .client import ReportErrorsServiceClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class ReportErrorsServiceAsyncClient: """An API for reporting error events.""" @@ -255,6 +265,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceAsyncClient`.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "credentialsType": None, + }, + ) + async def report_error_event( self, request: Optional[ @@ -265,7 +297,7 @@ async def report_error_event( event: Optional[report_errors_service.ReportedErrorEvent] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> report_errors_service.ReportErrorEventResponse: r"""Report an individual error event and record the event to a log. 
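The client-creation DEBUG log added in the hunk above is emitted only when the
installed google-api-core provides the optional client_logging module and the
library's logger is enabled. One way to surface it from application code (a
sketch; the logger name follows the package's module path):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    # Generated modules log under their dotted module path, so enabling the
    # package-level logger covers the clients and transports in this change.
    logging.getLogger("google.cloud.errorreporting_v1beta1").setLevel(logging.DEBUG)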
@@ -345,8 +377,10 @@ async def sample_report_error_event(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse: @@ -358,7 +392,10 @@ async def sample_report_error_event(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_name, event]) + flattened_params = [project_name, event] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index 212c5811..aa9f01e8 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -14,6 +14,9 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import os import re from typing import ( @@ -48,6 +51,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.cloud.errorreporting_v1beta1.types import report_errors_service from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import ReportErrorsServiceGrpcTransport @@ -437,52 +449,45 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. Returns: - bool: True iff client_universe matches the universe in credentials. + bool: True iff the configured universe domain is valid. Raises: - ValueError: when client_universe does not match the universe in credentials. + ValueError: If the configured universe domain is not valid. 
""" - default_universe = ReportErrorsServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) + # NOTE (b/349488459): universe validation is disabled until further notice. return True - def _validate_universe_domain(self): - """Validates client's and credentials' universe domains are consistent. - - Returns: - bool: True iff the configured universe domain is valid. + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. - Raises: - ValueError: If the configured universe domain is not valid. + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or ReportErrorsServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) @property def api_endpoint(self): @@ -592,6 +597,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. 
+ client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -658,6 +667,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceClient`.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "credentialsType": None, + }, + ) + def report_error_event( self, request: Optional[ @@ -668,7 +700,7 @@ def report_error_event( event: Optional[report_errors_service.ReportedErrorEvent] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> report_errors_service.ReportErrorEventResponse: r"""Report an individual error event and record the event to a log. @@ -748,8 +780,10 @@ def sample_report_error_event(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse: @@ -761,7 +795,10 @@ def sample_report_error_event(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([project_name, event]) + flattened_params = [project_name, event] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/README.rst b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/README.rst new file mode 100644 index 00000000..d70e9010 --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`ReportErrorsServiceTransport` is the ABC for all transports. +- public child `ReportErrorsServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `ReportErrorsServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). 
+- private child `_BaseReportErrorsServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `ReportErrorsServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index 0cbcc619..e9cf45f3 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,12 +24,90 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.cloud.errorreporting_v1beta1.types import report_errors_service from .base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + 
"metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ReportErrorsServiceGrpcTransport(ReportErrorsServiceTransport): """gRPC backend transport for ReportErrorsService. @@ -180,7 +261,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -278,7 +364,7 @@ def report_error_event( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "report_error_event" not in self._stubs: - self._stubs["report_error_event"] = self.grpc_channel.unary_unary( + self._stubs["report_error_event"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ReportErrorsService/ReportErrorEvent", request_serializer=report_errors_service.ReportErrorEventRequest.serialize, response_deserializer=report_errors_service.ReportErrorEventResponse.deserialize, @@ -286,7 +372,7 @@ def report_error_event( return self._stubs["report_error_event"] def close(self): - self.grpc_channel.close() + self._logged_channel.close() @property def kind(self) -> str: diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index 5b3612ba..609b0be2 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -22,14 +26,93 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.errorreporting_v1beta1.types import report_errors_service from .base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO from .grpc import ReportErrorsServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class ReportErrorsServiceGrpcAsyncIOTransport(ReportErrorsServiceTransport): """gRPC AsyncIO backend transport for ReportErrorsService. @@ -226,7 +309,13 @@ def __init__( ], ) - # Wrap messages. 
This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -282,7 +371,7 @@ def report_error_event( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "report_error_event" not in self._stubs: - self._stubs["report_error_event"] = self.grpc_channel.unary_unary( + self._stubs["report_error_event"] = self._logged_channel.unary_unary( "/google.devtools.clouderrorreporting.v1beta1.ReportErrorsService/ReportErrorEvent", request_serializer=report_errors_service.ReportErrorEventRequest.serialize, response_deserializer=report_errors_service.ReportErrorEventResponse.deserialize, @@ -292,15 +381,24 @@ def report_error_event( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.report_error_event: gapic_v1.method_async.wrap_method( + self.report_error_event: self._wrap_method( self.report_error_event, default_timeout=None, client_info=client_info, ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" __all__ = ("ReportErrorsServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py index 64aa2ae0..b6120041 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py @@ -13,44 +13,49 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings + +from google.cloud.errorreporting_v1beta1.types import report_errors_service + + +from .rest_base import _BaseReportErrorsServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore -from google.cloud.errorreporting_v1beta1.types import report_errors_service - -from .base import ( - ReportErrorsServiceTransport, - DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, -) + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -86,9 +91,10 @@ def post_report_error_event(self, response): def pre_report_error_event( self, request: report_errors_service.ReportErrorEventRequest, - metadata: Sequence[Tuple[str, str]], + metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - report_errors_service.ReportErrorEventRequest, Sequence[Tuple[str, str]] + report_errors_service.ReportErrorEventRequest, + Sequence[Tuple[str, Union[str, bytes]]], ]: """Pre-rpc interceptor for report_error_event @@ -102,12 +108,38 @@ def post_report_error_event( ) -> report_errors_service.ReportErrorEventResponse: """Post-rpc interceptor for report_error_event - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_report_error_event_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the ReportErrorsService server but before - it is returned to user code. + it is returned to user code. This `post_report_error_event` interceptor runs + before the `post_report_error_event_with_metadata` interceptor. """ return response + def post_report_error_event_with_metadata( + self, + response: report_errors_service.ReportErrorEventResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + report_errors_service.ReportErrorEventResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for report_error_event + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ReportErrorsService server but before it is returned to user code. 
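For example, a custom interceptor can be wired into the REST transport like
this (a sketch: `AuditingInterceptor` is hypothetical, and application
default credentials are assumed to be available):

    from google.cloud import errorreporting_v1beta1
    from google.cloud.errorreporting_v1beta1.services.report_errors_service.transports.rest import (
        ReportErrorsServiceRestInterceptor,
        ReportErrorsServiceRestTransport,
    )

    class AuditingInterceptor(ReportErrorsServiceRestInterceptor):
        def post_report_error_event_with_metadata(self, response, metadata):
            # Response headers arrive here as metadata; pass both through unchanged.
            return response, metadata

    client = errorreporting_v1beta1.ReportErrorsServiceClient(
        transport=ReportErrorsServiceRestTransport(interceptor=AuditingInterceptor())
    )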
+ + We recommend only using this `post_report_error_event_with_metadata` + interceptor in new development instead of the `post_report_error_event` interceptor. + When both interceptors are used, this `post_report_error_event_with_metadata` interceptor runs after the + `post_report_error_event` interceptor. The (possibly modified) response returned by + `post_report_error_event` will be passed to + `post_report_error_event_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class ReportErrorsServiceRestStub: @@ -116,8 +148,8 @@ class ReportErrorsServiceRestStub: _interceptor: ReportErrorsServiceRestInterceptor -class ReportErrorsServiceRestTransport(ReportErrorsServiceTransport): - """REST backend transport for ReportErrorsService. +class ReportErrorsServiceRestTransport(_BaseReportErrorsServiceRestTransport): + """REST backend synchronous transport for ReportErrorsService. An API for reporting error events. @@ -126,7 +158,6 @@ class ReportErrorsServiceRestTransport(ReportErrorsServiceTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -180,21 +211,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -205,19 +227,35 @@ def __init__( self._interceptor = interceptor or ReportErrorsServiceRestInterceptor() self._prep_wrapped_messages(client_info) - class _ReportErrorEvent(ReportErrorsServiceRestStub): + class _ReportErrorEvent( + _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent, + ReportErrorsServiceRestStub, + ): def __hash__(self): - return hash("ReportErrorEvent") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("ReportErrorsServiceRestTransport.ReportErrorEvent") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -225,7 +263,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> report_errors_service.ReportErrorEventResponse: r"""Call the report error event method over HTTP. @@ -236,8 +274,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.report_errors_service.ReportErrorEventResponse: @@ -247,47 +287,62 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1beta1/{project_name=projects/*}/events:report", - "body": "event", - }, - ] + http_options = ( + _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_http_options() + ) + request, metadata = self._interceptor.pre_report_error_event( request, metadata ) - pb_request = report_errors_service.ReportErrorEventRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - query_params["$alt"] = "json;enum-encoding=int" + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceClient.ReportErrorEvent", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "rpcName": "ReportErrorEvent", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = ReportErrorsServiceRestTransport._ReportErrorEvent._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -300,7 +355,35 @@ def __call__( pb_resp = report_errors_service.ReportErrorEventResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_report_error_event(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + 
resp, _ = self._interceptor.post_report_error_event_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + report_errors_service.ReportErrorEventResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.devtools.clouderrorreporting_v1beta1.ReportErrorsServiceClient.report_error_event", + extra={ + "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService", + "rpcName": "ReportErrorEvent", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py new file mode 100644 index 00000000..61e2fbf1 --- /dev/null +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py @@ -0,0 +1,150 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from .base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.cloud.errorreporting_v1beta1.types import report_errors_service + + +class _BaseReportErrorsServiceRestTransport(ReportErrorsServiceTransport): + """Base REST backend transport for ReportErrorsService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "clouderrorreporting.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'clouderrorreporting.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseReportErrorEvent: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta1/{project_name=projects/*}/events:report", + "body": "event", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = report_errors_service.ReportErrorEventRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseReportErrorsServiceRestTransport._BaseReportErrorEvent._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + +__all__ = ("_BaseReportErrorsServiceRestTransport",) diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json index f8a87619..f3b984e5 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-error-reporting", - "version": "1.11.1" + "version": "0.1.0" }, "snippets": [ { @@ -47,7 +47,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup", @@ -127,7 +127,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup", @@ -208,7 +208,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], 
"resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup", @@ -288,7 +288,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.types.ErrorGroup", @@ -369,7 +369,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse", @@ -449,7 +449,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.types.DeleteEventsResponse", @@ -534,7 +534,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsAsyncPager", @@ -618,7 +618,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListEventsPager", @@ -703,7 +703,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsAsyncPager", @@ -787,7 +787,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.services.error_stats_service.pagers.ListGroupStatsPager", @@ -872,7 +872,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse", @@ -956,7 +956,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse", diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt index e69de29b..ed7f9aed 100644 --- a/testing/constraints-3.13.txt +++ b/testing/constraints-3.13.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index e6e0089b..cacaa620 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -59,10 +66,32 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -312,83 +341,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (ErrorGroupServiceClient, transports.ErrorGroupServiceGrpcTransport, "grpc"), - (ErrorGroupServiceClient, transports.ErrorGroupServiceRestTransport, "rest"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ErrorGroupServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ErrorGroupServiceClient(credentials=cred) + client._transport._credentials = cred - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
- ) + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -1202,25 +1194,6 @@ def test_get_group(request_type, transport: str = "grpc"): assert response.resolution_status == common.ResolutionStatus.OPEN -def test_get_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_group), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.GetGroupRequest() - - def test_get_group_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1284,38 +1257,13 @@ def test_get_group_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_group_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_group), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common.ErrorGroup( - name="name_value", - group_id="group_id_value", - resolution_status=common.ResolutionStatus.OPEN, - ) - ) - response = await client.get_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.GetGroupRequest() - - @pytest.mark.asyncio async def test_get_group_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1354,7 +1302,7 @@ async def test_get_group_async( transport: str = "grpc_asyncio", request_type=error_group_service.GetGroupRequest ): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1424,7 +1372,7 @@ def test_get_group_field_headers(): @pytest.mark.asyncio async def test_get_group_field_headers_async(): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1492,7 +1440,7 @@ def test_get_group_flattened_error(): @pytest.mark.asyncio async def test_get_group_flattened_async(): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1519,7 +1467,7 @@ async def test_get_group_flattened_async(): @pytest.mark.asyncio async def test_get_group_flattened_error_async(): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1571,25 +1519,6 @@ def test_update_group(request_type, transport: str = "grpc"): assert response.resolution_status == common.ResolutionStatus.OPEN -def test_update_group_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_group), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.UpdateGroupRequest() - - def test_update_group_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1649,31 +1578,6 @@ def test_update_group_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_group_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_group), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - common.ErrorGroup( - name="name_value", - group_id="group_id_value", - resolution_status=common.ResolutionStatus.OPEN, - ) - ) - response = await client.update_group() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_group_service.UpdateGroupRequest() - - @pytest.mark.asyncio async def test_update_group_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1682,7 +1586,7 @@ async def test_update_group_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1721,7 +1625,7 @@ async def test_update_group_async( transport: str = "grpc_asyncio", request_type=error_group_service.UpdateGroupRequest ): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1791,7 +1695,7 @@ def test_update_group_field_headers(): @pytest.mark.asyncio async def test_update_group_field_headers_async(): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1859,7 +1763,7 @@ def test_update_group_flattened_error(): @pytest.mark.asyncio async def test_update_group_flattened_async(): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1886,7 +1790,7 @@ async def test_update_group_flattened_async(): @pytest.mark.asyncio async def test_update_group_flattened_error_async(): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1898,50 +1802,6 @@ async def test_update_group_flattened_error_async(): ) -@pytest.mark.parametrize( - "request_type", - [ - error_group_service.GetGroupRequest, - dict, - ], -) -def test_get_group_rest(request_type): - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"group_name": "projects/sample1/groups/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = common.ErrorGroup( - name="name_value", - group_id="group_id_value", - resolution_status=common.ResolutionStatus.OPEN, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_group(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, common.ErrorGroup) - assert response.name == "name_value" - assert response.group_id == "group_id_value" - assert response.resolution_status == common.ResolutionStatus.OPEN - - def test_get_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2044,6 +1904,7 @@ def test_get_group_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_group(request) @@ -2061,85 +1922,6 @@ def test_get_group_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("groupName",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_group_rest_interceptors(null_interceptor): - transport = transports.ErrorGroupServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ErrorGroupServiceRestInterceptor(), - ) - client = ErrorGroupServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ErrorGroupServiceRestInterceptor, "post_get_group" - ) as post, mock.patch.object( - transports.ErrorGroupServiceRestInterceptor, "pre_get_group" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = error_group_service.GetGroupRequest.pb( - error_group_service.GetGroupRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.ErrorGroup.to_json(common.ErrorGroup()) - - request = error_group_service.GetGroupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = common.ErrorGroup() - - client.get_group( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_group_rest_bad_request( - transport: str = "rest", request_type=error_group_service.GetGroupRequest -): - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"group_name": "projects/sample1/groups/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_group(request) - - def test_get_group_rest_flattened(): client = ErrorGroupServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2168,6 +1950,7 @@ def test_get_group_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_group(**mock_args) @@ -2196,144 +1979,21 @@ def test_get_group_rest_flattened_error(transport: str = "rest"): ) -def test_get_group_rest_error(): - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_update_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -@pytest.mark.parametrize( - "request_type", - [ - error_group_service.UpdateGroupRequest, - dict, - ], -) -def test_update_group_rest(request_type): - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"group": {"name": "projects/sample1/groups/sample2"}} - request_init["group"] = { - "name": "projects/sample1/groups/sample2", - "group_id": "group_id_value", - "tracking_issues": [{"url": "url_value"}], - "resolution_status": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = error_group_service.UpdateGroupRequest.meta.fields["group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["group"][field])): - del request_init["group"][field][i][subfield] - else: - del request_init["group"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common.ErrorGroup( - name="name_value", - group_id="group_id_value", - resolution_status=common.ResolutionStatus.OPEN, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_group(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, common.ErrorGroup) - assert response.name == "name_value" - assert response.group_id == "group_id_value" - assert response.resolution_status == common.ResolutionStatus.OPEN - - -def test_update_group_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.update_group in client._transport._wrapped_methods + # Ensure method has been cached + assert client._transport.update_group in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -2415,23 +2075,579 @@ def test_update_group_rest_required_fields( return_value = common.ErrorGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_group_rest_unset_required_fields(): + transport = transports.ErrorGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("group",))) + + +def test_update_group_rest_flattened(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup() + + # get arguments that satisfy an http rule for this method + sample_request = {"group": {"name": "projects/sample1/groups/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + group=common.ErrorGroup(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{group.name=projects/*/groups/*}" % client.transport._host, + args[1], + ) + + +def test_update_group_rest_flattened_error(transport: str = "rest"): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_group( + error_group_service.UpdateGroupRequest(), + group=common.ErrorGroup(name="name_value"), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ErrorGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ErrorGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ErrorGroupServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ErrorGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ErrorGroupServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ErrorGroupServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ErrorGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ErrorGroupServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ErrorGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ErrorGroupServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ErrorGroupServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ErrorGroupServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ErrorGroupServiceGrpcTransport, + transports.ErrorGroupServiceGrpcAsyncIOTransport, + transports.ErrorGroupServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ErrorGroupServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_group_empty_call_grpc(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_group), "__call__") as call: + call.return_value = common.ErrorGroup() + client.get_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_group_service.GetGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_group_empty_call_grpc(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_group), "__call__") as call: + call.return_value = common.ErrorGroup() + client.update_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_group_service.UpdateGroupRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ErrorGroupServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ErrorGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_group_empty_call_grpc_asyncio(): + client = ErrorGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + ) + await client.get_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_group_service.GetGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_update_group_empty_call_grpc_asyncio(): + client = ErrorGroupServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_group), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + ) + await client.update_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_group_service.UpdateGroupRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ErrorGroupServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_group_rest_bad_request(request_type=error_group_service.GetGroupRequest): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"group_name": "projects/sample1/groups/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + error_group_service.GetGroupRequest, + dict, + ], +) +def test_get_group_rest_call_success(request_type): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"group_name": "projects/sample1/groups/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_group(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, common.ErrorGroup) + assert response.name == "name_value" + assert response.group_id == "group_id_value" + assert response.resolution_status == common.ResolutionStatus.OPEN + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_group_rest_interceptors(null_interceptor): + transport = transports.ErrorGroupServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ErrorGroupServiceRestInterceptor(), + ) + client = ErrorGroupServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "post_get_group" + ) as post, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "post_get_group_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "pre_get_group" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = error_group_service.GetGroupRequest.pb( + error_group_service.GetGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = common.ErrorGroup.to_json(common.ErrorGroup()) + req.return_value.content = return_value + + request = error_group_service.GetGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = common.ErrorGroup() + post_with_metadata.return_value = common.ErrorGroup(), metadata + + client.get_group( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_group_rest_bad_request( + request_type=error_group_service.UpdateGroupRequest, +): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"group": {"name": "projects/sample1/groups/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + error_group_service.UpdateGroupRequest, + dict, + ], +) +def test_update_group_rest_call_success(request_type): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"group": {"name": "projects/sample1/groups/sample2"}} + request_init["group"] = { + "name": "projects/sample1/groups/sample2", + "group_id": "group_id_value", + "tracking_issues": [{"url": "url_value"}], + "resolution_status": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = error_group_service.UpdateGroupRequest.meta.fields["group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - response = client.update_group(request) + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if 
field_repeated: + for i in range(0, len(request_init["group"][field])): + del request_init["group"][field][i][subfield] + else: + del request_init["group"][field][subfield] + request = request_type(**request_init) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = common.ErrorGroup( + name="name_value", + group_id="group_id_value", + resolution_status=common.ResolutionStatus.OPEN, + ) + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 -def test_update_group_rest_unset_required_fields(): - transport = transports.ErrorGroupServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Convert return value to protobuf type + return_value = common.ErrorGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_group(request) - unset_fields = transport.update_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("group",))) + # Establish that the response is the type that we expect. + assert isinstance(response, common.ErrorGroup) + assert response.name == "name_value" + assert response.group_id == "group_id_value" + assert response.resolution_status == common.ResolutionStatus.OPEN @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -2443,6 +2659,7 @@ def test_update_group_rest_interceptors(null_interceptor): else transports.ErrorGroupServiceRestInterceptor(), ) client = ErrorGroupServiceClient(transport=transport) + with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( @@ -2450,10 +2667,13 @@ def test_update_group_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.ErrorGroupServiceRestInterceptor, "post_update_group" ) as post, mock.patch.object( + transports.ErrorGroupServiceRestInterceptor, "post_update_group_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.ErrorGroupServiceRestInterceptor, "pre_update_group" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = error_group_service.UpdateGroupRequest.pb( error_group_service.UpdateGroupRequest() ) @@ -2464,10 +2684,11 @@ def test_update_group_rest_interceptors(null_interceptor): "query_params": pb_message, } - req.return_value = Response() + req.return_value = mock.Mock() req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = common.ErrorGroup.to_json(common.ErrorGroup()) + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = common.ErrorGroup.to_json(common.ErrorGroup()) + req.return_value.content = return_value request = error_group_service.UpdateGroupRequest() metadata = [ @@ -2476,6 +2697,7 @@ def test_update_group_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = common.ErrorGroup() + post_with_metadata.return_value = common.ErrorGroup(), metadata client.update_group( request, @@ -2487,198 
+2709,54 @@ def test_update_group_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() -def test_update_group_rest_bad_request( - transport: str = "rest", request_type=error_group_service.UpdateGroupRequest -): +def test_initialize_client_w_rest(): client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - - # send a request that will satisfy transcoding - request_init = {"group": {"name": "projects/sample1/groups/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_group(request) + assert client is not None -def test_update_group_rest_flattened(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_group_empty_call_rest(): client = ErrorGroupServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = common.ErrorGroup() - - # get arguments that satisfy an http rule for this method - sample_request = {"group": {"name": "projects/sample1/groups/sample2"}} - - # get truthy value for each flattened field - mock_args = dict( - group=common.ErrorGroup(name="name_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = common.ErrorGroup.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.update_group(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta1/{group.name=projects/*/groups/*}" % client.transport._host, - args[1], - ) - + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_group), "__call__") as call: + client.get_group(request=None) -def test_update_group_rest_flattened_error(transport: str = "rest"): - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_group_service.GetGroupRequest() - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_group( - error_group_service.UpdateGroupRequest(), - group=common.ErrorGroup(name="name_value"), - ) + assert args[0] == request_msg -def test_update_group_rest_error(): +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_group_empty_call_rest(): client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ErrorGroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.ErrorGroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ErrorGroupServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.ErrorGroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ErrorGroupServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ErrorGroupServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.ErrorGroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ErrorGroupServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.ErrorGroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ErrorGroupServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ErrorGroupServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.ErrorGroupServiceGrpcAsyncIOTransport( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ErrorGroupServiceGrpcTransport, - transports.ErrorGroupServiceGrpcAsyncIOTransport, - transports.ErrorGroupServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.update_group), "__call__") as call: + client.update_group(request=None) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_group_service.UpdateGroupRequest() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ErrorGroupServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + assert args[0] == request_msg def test_transport_grpc_default(): @@ -3259,36 +3337,41 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +def test_transport_close_grpc(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_transport_close_grpc_asyncio(): client = ErrorGroupServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" + type(getattr(client.transport, "_grpc_channel")), "close" ) as close: async with client: close.assert_not_called() close.assert_called_once() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = ErrorGroupServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() +def test_transport_close_rest(): + client = ErrorGroupServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 9241fa39..437f9b4a 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -62,10 +69,32 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} 
+CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -315,83 +344,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - (ErrorStatsServiceClient, transports.ErrorStatsServiceGrpcTransport, "grpc"), - (ErrorStatsServiceClient, transports.ErrorStatsServiceRestTransport, "rest"), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ErrorStatsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. 
- google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ErrorStatsServiceClient(credentials=cred) + client._transport._credentials = cred - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -1201,25 +1193,6 @@ def test_list_group_stats(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_group_stats_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_group_stats() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListGroupStatsRequest() - - def test_list_group_stats_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -1287,29 +1260,6 @@ def test_list_group_stats_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_group_stats_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - error_stats_service.ListGroupStatsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_group_stats() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListGroupStatsRequest() - - @pytest.mark.asyncio async def test_list_group_stats_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1318,7 +1268,7 @@ async def test_list_group_stats_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1358,7 +1308,7 @@ async def test_list_group_stats_async( request_type=error_stats_service.ListGroupStatsRequest, ): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1424,7 +1374,7 @@ def test_list_group_stats_field_headers(): @pytest.mark.asyncio async def test_list_group_stats_field_headers_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1505,7 +1455,7 @@ def test_list_group_stats_flattened_error(): @pytest.mark.asyncio async def test_list_group_stats_flattened_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1542,7 +1492,7 @@ async def test_list_group_stats_flattened_async(): @pytest.mark.asyncio async def test_list_group_stats_flattened_error_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1655,7 +1605,7 @@ def test_list_group_stats_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_group_stats_async_pager(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1707,7 +1657,7 @@ async def test_list_group_stats_async_pager(): @pytest.mark.asyncio async def test_list_group_stats_async_pages(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1789,25 +1739,6 @@ def test_list_events(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_events_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_events), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_events() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListEventsRequest() - - def test_list_events_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1875,29 +1806,6 @@ def test_list_events_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_events_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_events), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - error_stats_service.ListEventsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_events() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.ListEventsRequest() - - @pytest.mark.asyncio async def test_list_events_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1906,7 +1814,7 @@ async def test_list_events_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1945,7 +1853,7 @@ async def test_list_events_async( transport: str = "grpc_asyncio", request_type=error_stats_service.ListEventsRequest ): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2011,7 +1919,7 @@ def test_list_events_field_headers(): @pytest.mark.asyncio async def test_list_events_field_headers_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2086,7 +1994,7 @@ def test_list_events_flattened_error(): @pytest.mark.asyncio async def test_list_events_flattened_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2119,7 +2027,7 @@ async def test_list_events_flattened_async(): @pytest.mark.asyncio async def test_list_events_flattened_error_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2230,7 +2138,7 @@ def test_list_events_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_events_async_pager(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2280,7 +2188,7 @@ async def test_list_events_async_pager(): @pytest.mark.asyncio async def test_list_events_async_pages(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2359,25 +2267,6 @@ def test_delete_events(request_type, transport: str = "grpc"): assert isinstance(response, error_stats_service.DeleteEventsResponse) -def test_delete_events_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_events), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_events() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.DeleteEventsRequest() - - def test_delete_events_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2441,27 +2330,6 @@ def test_delete_events_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_events_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_events), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - error_stats_service.DeleteEventsResponse() - ) - response = await client.delete_events() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == error_stats_service.DeleteEventsRequest() - - @pytest.mark.asyncio async def test_delete_events_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2470,7 +2338,7 @@ async def test_delete_events_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2510,7 +2378,7 @@ async def test_delete_events_async( request_type=error_stats_service.DeleteEventsRequest, ): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2573,7 +2441,7 @@ def test_delete_events_field_headers(): @pytest.mark.asyncio async def test_delete_events_field_headers_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2643,7 +2511,7 @@ def test_delete_events_flattened_error(): @pytest.mark.asyncio async def test_delete_events_flattened_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2672,7 +2540,7 @@ async def test_delete_events_flattened_async(): @pytest.mark.asyncio async def test_delete_events_flattened_error_async(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2684,46 +2552,6 @@ async def test_delete_events_flattened_error_async(): ) -@pytest.mark.parametrize( - "request_type", - [ - error_stats_service.ListGroupStatsRequest, - dict, - ], -) -def test_list_group_stats_rest(request_type): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_name": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = error_stats_service.ListGroupStatsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_group_stats(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListGroupStatsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_group_stats_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -2842,6 +2670,7 @@ def test_list_group_stats_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_group_stats(request) @@ -2874,120 +2703,40 @@ def test_list_group_stats_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_group_stats_rest_interceptors(null_interceptor): - transport = transports.ErrorStatsServiceRestTransport( +def test_list_group_stats_rest_flattened(): + client = ErrorStatsServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ErrorStatsServiceRestInterceptor(), + transport="rest", ) - client = ErrorStatsServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ErrorStatsServiceRestInterceptor, "post_list_group_stats" - ) as post, mock.patch.object( - transports.ErrorStatsServiceRestInterceptor, "pre_list_group_stats" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = error_stats_service.ListGroupStatsRequest.pb( - error_stats_service.ListGroupStatsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = error_stats_service.ListGroupStatsResponse.to_json( - error_stats_service.ListGroupStatsResponse() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = error_stats_service.ListGroupStatsResponse() - request = error_stats_service.ListGroupStatsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = error_stats_service.ListGroupStatsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"project_name": "projects/sample1"} - client.list_group_stats( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # get truthy value for each flattened field + mock_args = dict( + project_name="project_name_value", + time_range=error_stats_service.QueryTimeRange( + period=error_stats_service.QueryTimeRange.Period.PERIOD_1_HOUR + ), ) + mock_args.update(sample_request) - pre.assert_called_once() - post.assert_called_once() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - -def test_list_group_stats_rest_bad_request( - transport: str = "rest", request_type=error_stats_service.ListGroupStatsRequest -): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_name": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_group_stats(request) - - -def test_list_group_stats_rest_flattened(): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = error_stats_service.ListGroupStatsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"project_name": "projects/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - project_name="project_name_value", - time_range=error_stats_service.QueryTimeRange( - period=error_stats_service.QueryTimeRange.Period.PERIOD_1_HOUR - ), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.list_group_stats(**mock_args) + client.list_group_stats(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -3080,46 +2829,6 @@ def test_list_group_stats_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - error_stats_service.ListEventsRequest, - dict, - ], -) -def test_list_events_rest(request_type): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_name": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = error_stats_service.ListEventsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = error_stats_service.ListEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_events(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEventsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_events_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3239,6 +2948,7 @@ def test_list_events_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_events(request) @@ -3278,87 +2988,6 @@ def test_list_events_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_events_rest_interceptors(null_interceptor): - transport = transports.ErrorStatsServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ErrorStatsServiceRestInterceptor(), - ) - client = ErrorStatsServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ErrorStatsServiceRestInterceptor, "post_list_events" - ) as post, mock.patch.object( - transports.ErrorStatsServiceRestInterceptor, "pre_list_events" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = error_stats_service.ListEventsRequest.pb( - error_stats_service.ListEventsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = error_stats_service.ListEventsResponse.to_json( - error_stats_service.ListEventsResponse() - ) - - request = error_stats_service.ListEventsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = error_stats_service.ListEventsResponse() - - client.list_events( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - 
], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_events_rest_bad_request( - transport: str = "rest", request_type=error_stats_service.ListEventsRequest -): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_name": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_events(request) - - def test_list_events_rest_flattened(): client = ErrorStatsServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3388,6 +3017,7 @@ def test_list_events_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_events(**mock_args) @@ -3480,43 +3110,6 @@ def test_list_events_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - error_stats_service.DeleteEventsRequest, - dict, - ], -) -def test_delete_events_rest(request_type): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_name": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = error_stats_service.DeleteEventsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = error_stats_service.DeleteEventsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_events(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, error_stats_service.DeleteEventsResponse) - - def test_delete_events_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3619,6 +3212,7 @@ def test_delete_events_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_events(request) @@ -3632,12 +3226,536 @@ def test_delete_events_rest_unset_required_fields(): credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_events._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("projectName",))) + unset_fields = transport.delete_events._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("projectName",))) + + +def test_delete_events_rest_flattened(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.DeleteEventsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_name": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_name="project_name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = error_stats_service.DeleteEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_events(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta1/{project_name=projects/*}/events" % client.transport._host, + args[1], + ) + + +def test_delete_events_rest_flattened_error(transport: str = "rest"): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_events( + error_stats_service.DeleteEventsRequest(), + project_name="project_name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ErrorStatsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.ErrorStatsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ErrorStatsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ErrorStatsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ErrorStatsServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ErrorStatsServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ErrorStatsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ErrorStatsServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ErrorStatsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ErrorStatsServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.ErrorStatsServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ErrorStatsServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ErrorStatsServiceGrpcTransport, + transports.ErrorStatsServiceGrpcAsyncIOTransport, + transports.ErrorStatsServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ErrorStatsServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_group_stats_empty_call_grpc(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: + call.return_value = error_stats_service.ListGroupStatsResponse() + client.list_group_stats(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.ListGroupStatsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_events_empty_call_grpc(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + call.return_value = error_stats_service.ListEventsResponse() + client.list_events(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.ListEventsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_events_empty_call_grpc(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_events), "__call__") as call: + call.return_value = error_stats_service.DeleteEventsResponse() + client.delete_events(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.DeleteEventsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ErrorStatsServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ErrorStatsServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_group_stats_empty_call_grpc_asyncio(): + client = ErrorStatsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + error_stats_service.ListGroupStatsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_group_stats(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.ListGroupStatsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_events_empty_call_grpc_asyncio(): + client = ErrorStatsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + error_stats_service.ListEventsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_events(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.ListEventsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_events_empty_call_grpc_asyncio(): + client = ErrorStatsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_events), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + error_stats_service.DeleteEventsResponse() + ) + await client.delete_events(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.DeleteEventsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ErrorStatsServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_list_group_stats_rest_bad_request( + request_type=error_stats_service.ListGroupStatsRequest, +): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_group_stats(request) + + +@pytest.mark.parametrize( + "request_type", + [ + error_stats_service.ListGroupStatsRequest, + dict, + ], +) +def test_list_group_stats_rest_call_success(request_type): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = error_stats_service.ListGroupStatsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = error_stats_service.ListGroupStatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_group_stats(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGroupStatsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_group_stats_rest_interceptors(null_interceptor): + transport = transports.ErrorStatsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ErrorStatsServiceRestInterceptor(), + ) + client = ErrorStatsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "post_list_group_stats" + ) as post, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, + "post_list_group_stats_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "pre_list_group_stats" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = error_stats_service.ListGroupStatsRequest.pb( + error_stats_service.ListGroupStatsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = error_stats_service.ListGroupStatsResponse.to_json( + error_stats_service.ListGroupStatsResponse() + ) + req.return_value.content = return_value + + request = error_stats_service.ListGroupStatsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = error_stats_service.ListGroupStatsResponse() + post_with_metadata.return_value = ( + error_stats_service.ListGroupStatsResponse(), + metadata, + ) + + client.list_group_stats( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_events_rest_bad_request( + request_type=error_stats_service.ListEventsRequest, +): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
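+    # A mocked 400 status is sufficient: the REST transport maps HTTP 4xx codes
+    # onto google.api_core exceptions, which pytest.raises checks below.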
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_events(request) + + +@pytest.mark.parametrize( + "request_type", + [ + error_stats_service.ListEventsRequest, + dict, + ], +) +def test_list_events_rest_call_success(request_type): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = error_stats_service.ListEventsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = error_stats_service.ListEventsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_events(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEventsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_events_rest_interceptors(null_interceptor): +def test_list_events_rest_interceptors(null_interceptor): transport = transports.ErrorStatsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -3645,19 +3763,23 @@ def test_delete_events_rest_interceptors(null_interceptor): else transports.ErrorStatsServiceRestInterceptor(), ) client = ErrorStatsServiceClient(transport=transport) + with mock.patch.object( type(client.transport._session), "request" ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.ErrorStatsServiceRestInterceptor, "post_delete_events" + transports.ErrorStatsServiceRestInterceptor, "post_list_events" ) as post, mock.patch.object( - transports.ErrorStatsServiceRestInterceptor, "pre_delete_events" + transports.ErrorStatsServiceRestInterceptor, "post_list_events_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "pre_list_events" ) as pre: pre.assert_not_called() post.assert_not_called() - pb_message = error_stats_service.DeleteEventsRequest.pb( - error_stats_service.DeleteEventsRequest() + post_with_metadata.assert_not_called() + pb_message = error_stats_service.ListEventsRequest.pb( + error_stats_service.ListEventsRequest() ) transcode.return_value = { "method": "post", @@ -3666,22 +3788,27 @@ def test_delete_events_rest_interceptors(null_interceptor): "query_params": pb_message, } - req.return_value = Response() + req.return_value = mock.Mock() req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = error_stats_service.DeleteEventsResponse.to_json( - error_stats_service.DeleteEventsResponse() + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = error_stats_service.ListEventsResponse.to_json( + error_stats_service.ListEventsResponse() ) + req.return_value.content = return_value - request = error_stats_service.DeleteEventsRequest() + request = error_stats_service.ListEventsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = error_stats_service.DeleteEventsResponse() + post.return_value = error_stats_service.ListEventsResponse() + post_with_metadata.return_value = ( + error_stats_service.ListEventsResponse(), + metadata, + ) - client.delete_events( + client.list_events( request, metadata=[ ("key", "val"), @@ -3691,16 +3818,15 @@ def test_delete_events_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_events_rest_bad_request( - transport: str = "rest", request_type=error_stats_service.DeleteEventsRequest + request_type=error_stats_service.DeleteEventsRequest, ): client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # send a request that will satisfy transcoding request_init = {"project_name": "projects/sample1"} request = request_type(**request_init) @@ -3710,179 +3836,185 @@ def test_delete_events_rest_bad_request( core_exceptions.BadRequest ): # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = Request() + response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_events(request) -def test_delete_events_rest_flattened(): +@pytest.mark.parametrize( + "request_type", + [ + error_stats_service.DeleteEventsRequest, + dict, + ], +) +def test_delete_events_rest_call_success(request_type): client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = error_stats_service.DeleteEventsResponse() - # get arguments that satisfy an http rule for this method - sample_request = {"project_name": "projects/sample1"} - - # get truthy value for each flattened field - mock_args = dict( - project_name="project_name_value", - ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() + response_value = mock.Mock() response_value.status_code = 200 + # Convert return value to protobuf type return_value = error_stats_service.DeleteEventsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") + response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_events(request) - client.delete_events(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1beta1/{project_name=projects/*}/events" % client.transport._host, - args[1], - ) + # Establish that the response is the type that we expect. + assert isinstance(response, error_stats_service.DeleteEventsResponse) -def test_delete_events_rest_flattened_error(transport: str = "rest"): - client = ErrorStatsServiceClient( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_events_rest_interceptors(null_interceptor): + transport = transports.ErrorStatsServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + interceptor=None + if null_interceptor + else transports.ErrorStatsServiceRestInterceptor(), ) + client = ErrorStatsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "post_delete_events" + ) as post, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "post_delete_events_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.ErrorStatsServiceRestInterceptor, "pre_delete_events" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = error_stats_service.DeleteEventsRequest.pb( + error_stats_service.DeleteEventsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = error_stats_service.DeleteEventsResponse.to_json( + error_stats_service.DeleteEventsResponse() + ) + req.return_value.content = return_value + + request = error_stats_service.DeleteEventsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = error_stats_service.DeleteEventsResponse() + post_with_metadata.return_value = ( + error_stats_service.DeleteEventsResponse(), + metadata, + ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): client.delete_events( - error_stats_service.DeleteEventsRequest(), - project_name="project_name_value", + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + -def test_delete_events_rest_error(): +def test_initialize_client_w_rest(): client = ErrorStatsServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) + assert client is not None -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ErrorStatsServiceGrpcTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_group_stats_empty_call_rest(): + client = ErrorStatsServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.ErrorStatsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ErrorStatsServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_group_stats), "__call__") as call: + client.list_group_stats(request=None) - # It is an error to provide an api_key and a transport instance. - transport = transports.ErrorStatsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ErrorStatsServiceClient( - client_options=options, - transport=transport, - ) + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.ListGroupStatsRequest() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ErrorStatsServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + assert args[0] == request_msg - # It is an error to provide scopes and a transport instance. - transport = transports.ErrorStatsServiceGrpcTransport( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_events_empty_call_rest(): + client = ErrorStatsServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = ErrorStatsServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_events), "__call__") as call: + client.list_events(request=None) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ErrorStatsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ErrorStatsServiceClient(transport=transport) - assert client.transport is transport + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.ListEventsRequest() + assert args[0] == request_msg -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ErrorStatsServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.ErrorStatsServiceGrpcAsyncIOTransport( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_events_empty_call_rest(): + client = ErrorStatsServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize( - "transport_class", - [ - transports.ErrorStatsServiceGrpcTransport, - transports.ErrorStatsServiceGrpcAsyncIOTransport, - transports.ErrorStatsServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_events), "__call__") as call: + client.delete_events(request=None) + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = error_stats_service.DeleteEventsRequest() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = ErrorStatsServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + assert args[0] == request_msg def test_transport_grpc_default(): @@ -4467,36 +4599,41 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +def test_transport_close_grpc(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_transport_close_grpc_asyncio(): client = ErrorStatsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" + type(getattr(client.transport, "_grpc_channel")), "close" ) as close: async with client: close.assert_not_called() close.assert_called_once() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = ErrorStatsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() +def test_transport_close_rest(): + client = ErrorStatsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index ca1f397f..b8ad46b1 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -62,10 +69,32 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] 
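+        # Emit each slice as bytes so the mock behaves like an async byte stream.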
+ yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -321,91 +350,46 @@ def test__get_universe_domain(): @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "error_code,cred_info_json,show_cred_info", [ - ( - ReportErrorsServiceClient, - transports.ReportErrorsServiceGrpcTransport, - "grpc", - ), - ( - ReportErrorsServiceClient, - transports.ReportErrorsServiceRestTransport, - "rest", - ), + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), ], ) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ReportErrorsServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ReportErrorsServiceClient(credentials=cred) + client._transport._credentials = cred - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) + client._add_cred_info_for_auth_errors(error) + assert error.details == [] @pytest.mark.parametrize( @@ -1230,27 +1214,6 @@ def test_report_error_event(request_type, transport: str = "grpc"): assert isinstance(response, report_errors_service.ReportErrorEventResponse) -def test_report_error_event_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = ReportErrorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_error_event), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.report_error_event() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == report_errors_service.ReportErrorEventRequest() - - def test_report_error_event_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1320,29 +1283,6 @@ def test_report_error_event_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_report_error_event_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = ReportErrorsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.report_error_event), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - report_errors_service.ReportErrorEventResponse() - ) - response = await client.report_error_event() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == report_errors_service.ReportErrorEventRequest() - - @pytest.mark.asyncio async def test_report_error_event_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1351,7 +1291,7 @@ async def test_report_error_event_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = ReportErrorsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1391,7 +1331,7 @@ async def test_report_error_event_async( request_type=report_errors_service.ReportErrorEventRequest, ): client = ReportErrorsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1458,7 +1398,7 @@ def test_report_error_event_field_headers(): @pytest.mark.asyncio async def test_report_error_event_field_headers_async(): client = ReportErrorsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1543,7 +1483,7 @@ def test_report_error_event_flattened_error(): @pytest.mark.asyncio async def test_report_error_event_flattened_async(): client = ReportErrorsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1582,7 +1522,7 @@ async def test_report_error_event_flattened_async(): @pytest.mark.asyncio async def test_report_error_event_flattened_error_async(): client = ReportErrorsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1597,135 +1537,6 @@ async def test_report_error_event_flattened_error_async(): ) -@pytest.mark.parametrize( - "request_type", - [ - report_errors_service.ReportErrorEventRequest, - dict, - ], -) -def test_report_error_event_rest(request_type): - client = ReportErrorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project_name": "projects/sample1"} - request_init["event"] = { - "event_time": {"seconds": 751, "nanos": 543}, - "service_context": { - "service": "service_value", - "version": "version_value", - "resource_type": "resource_type_value", - }, - "message": "message_value", - "context": { - "http_request": { - "method": "method_value", - "url": "url_value", - "user_agent": "user_agent_value", - "referrer": "referrer_value", - "response_status_code": 2156, - "remote_ip": "remote_ip_value", - }, - "user": "user_value", - "report_location": { - "file_path": "file_path_value", - "line_number": 1168, - "function_name": "function_name_value", - }, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = report_errors_service.ReportErrorEventRequest.meta.fields["event"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["event"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["event"][field])): - del request_init["event"][field][i][subfield] - else: - del request_init["event"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = report_errors_service.ReportErrorEventResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = report_errors_service.ReportErrorEventResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.report_error_event(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, report_errors_service.ReportErrorEventResponse) - - def test_report_error_event_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -1835,6 +1646,7 @@ def test_report_error_event_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.report_error_event(request) @@ -1860,102 +1672,19 @@ def test_report_error_event_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_report_error_event_rest_interceptors(null_interceptor): - transport = transports.ReportErrorsServiceRestTransport( +def test_report_error_event_rest_flattened(): + client = ReportErrorsServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.ReportErrorsServiceRestInterceptor(), + transport="rest", ) - client = ReportErrorsServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.ReportErrorsServiceRestInterceptor, "post_report_error_event" - ) as post, mock.patch.object( - transports.ReportErrorsServiceRestInterceptor, "pre_report_error_event" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = report_errors_service.ReportErrorEventRequest.pb( - report_errors_service.ReportErrorEventRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = ( - report_errors_service.ReportErrorEventResponse.to_json( - report_errors_service.ReportErrorEventResponse() - ) - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_errors_service.ReportErrorEventResponse() - request = report_errors_service.ReportErrorEventRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = report_errors_service.ReportErrorEventResponse() - - client.report_error_event( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_report_error_event_rest_bad_request( - transport: str = "rest", request_type=report_errors_service.ReportErrorEventRequest -): - client = ReportErrorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project_name": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.report_error_event(request) - - -def test_report_error_event_rest_flattened(): - client = ReportErrorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = report_errors_service.ReportErrorEventResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"project_name": "projects/sample1"} + # get arguments that satisfy an http rule for this method + sample_request = {"project_name": "projects/sample1"} # get truthy value for each flattened field mock_args = dict( @@ -1974,6 +1703,7 @@ def test_report_error_event_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.report_error_event(**mock_args) @@ -2006,12 +1736,6 @@ def test_report_error_event_rest_flattened_error(transport: str = "rest"): ) -def test_report_error_event_rest_error(): - client = ReportErrorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ReportErrorsServiceGrpcTransport( @@ -2104,18 +1828,340 @@ def test_transport_adc(transport_class): adc.assert_called_once() +def test_transport_kind_grpc(): + transport = ReportErrorsServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_report_error_event_empty_call_grpc(): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report_error_event), "__call__" + ) as call: + call.return_value = report_errors_service.ReportErrorEventResponse() + client.report_error_event(request=None) + + # Establish that the underlying stub method was called. 
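+        # Even with request=None, the client builds a default request message;
+        # that default is what reaches the stub and is asserted on below.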
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = report_errors_service.ReportErrorEventRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ReportErrorsServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ReportErrorsServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_report_error_event_empty_call_grpc_asyncio(): + client = ReportErrorsServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report_error_event), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + report_errors_service.ReportErrorEventResponse() + ) + await client.report_error_event(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = report_errors_service.ReportErrorEventRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = ReportErrorsServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_report_error_event_rest_bad_request( + request_type=report_errors_service.ReportErrorEventRequest, +): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.report_error_event(request) + + @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", - "rest", + report_errors_service.ReportErrorEventRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = ReportErrorsServiceClient.get_transport_class(transport_name)( +def test_report_error_event_rest_call_success(request_type): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project_name": "projects/sample1"} + request_init["event"] = { + "event_time": {"seconds": 751, "nanos": 543}, + "service_context": { + "service": "service_value", + "version": "version_value", + "resource_type": "resource_type_value", + }, + "message": "message_value", + "context": { + "http_request": { + "method": "method_value", + "url": "url_value", + "user_agent": "user_agent_value", + "referrer": "referrer_value", + "response_status_code": 2156, + "remote_ip": "remote_ip_value", + }, + "user": "user_value", + "report_location": { + "file_path": "file_path_value", + "line_number": 1168, + "function_name": "function_name_value", + }, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = report_errors_service.ReportErrorEventRequest.meta.fields["event"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
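+        # proto-plus messages expose their fields via .meta.fields, while raw
+        # protobuf messages expose them via DESCRIPTOR.fields; both are handled.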
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["event"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["event"][field])): + del request_init["event"][field][i][subfield] + else: + del request_init["event"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = report_errors_service.ReportErrorEventResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = report_errors_service.ReportErrorEventResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.report_error_event(request) + + # Establish that the response is the type that we expect. 
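+    # ReportErrorEventResponse is an empty message, so the isinstance check is
+    # the complete assertion for this call.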
+ assert isinstance(response, report_errors_service.ReportErrorEventResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_report_error_event_rest_interceptors(null_interceptor): + transport = transports.ReportErrorsServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ReportErrorsServiceRestInterceptor(), + ) + client = ReportErrorsServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.ReportErrorsServiceRestInterceptor, "post_report_error_event" + ) as post, mock.patch.object( + transports.ReportErrorsServiceRestInterceptor, + "post_report_error_event_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.ReportErrorsServiceRestInterceptor, "pre_report_error_event" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = report_errors_service.ReportErrorEventRequest.pb( + report_errors_service.ReportErrorEventRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = report_errors_service.ReportErrorEventResponse.to_json( + report_errors_service.ReportErrorEventResponse() + ) + req.return_value.content = return_value + + request = report_errors_service.ReportErrorEventRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = report_errors_service.ReportErrorEventResponse() + post_with_metadata.return_value = ( + report_errors_service.ReportErrorEventResponse(), + metadata, + ) + + client.report_error_event( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_report_error_event_empty_call_rest(): + client = ReportErrorsServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.report_error_event), "__call__" + ) as call: + client.report_error_event(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = report_errors_service.ReportErrorEventRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -2667,36 +2713,41 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +def test_transport_close_grpc(): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + @pytest.mark.asyncio -async def test_transport_close_async(): +async def test_transport_close_grpc_asyncio(): client = ReportErrorsServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", + credentials=async_anonymous_credentials(), transport="grpc_asyncio" ) with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" + type(getattr(client.transport, "_grpc_channel")), "close" ) as close: async with client: close.assert_not_called() close.assert_called_once() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = ReportErrorsServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() +def test_transport_close_rest(): + client = ReportErrorsServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): From d998483acf55a03395a8e211db43f23cbd6f4296 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 10:29:12 -0400 Subject: [PATCH 102/111] chore: remove unused files (#556) * chore: remove unused files * remove post processing for .kokoro/docs --- .github/.OwlBot.lock.yaml | 4 +- .kokoro/docker/docs/Dockerfile | 89 ----- .kokoro/docker/docs/requirements.in | 2 - .kokoro/docker/docs/requirements.txt | 297 --------------- .kokoro/docs/common.cfg | 86 ----- .kokoro/docs/docs-presubmit.cfg | 28 -- .kokoro/docs/docs.cfg | 1 - .kokoro/publish-docs.sh | 58 --- .kokoro/release/common.cfg | 68 ---- .kokoro/release/release.cfg | 1 - .kokoro/requirements.in | 11 - .kokoro/requirements.txt | 537 --------------------------- owlbot.py | 2 +- 13 files changed, 3 insertions(+), 1181 deletions(-) delete mode 100644 .kokoro/docker/docs/Dockerfile delete mode 100644 .kokoro/docker/docs/requirements.in delete mode 100644 .kokoro/docker/docs/requirements.txt delete mode 100644 .kokoro/docs/common.cfg delete mode 100644 .kokoro/docs/docs-presubmit.cfg delete mode 100644 .kokoro/docs/docs.cfg delete mode 100755 .kokoro/publish-docs.sh delete mode 100644 .kokoro/release/common.cfg delete mode 100644 .kokoro/release/release.cfg delete mode 100644 .kokoro/requirements.in delete mode 100644 .kokoro/requirements.txt diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 3f7634f2..c631e1f7 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 +# created: 2025-03-05 diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e29..00000000 --- a/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. -RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 586bd070..00000000 --- a/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1,2 +0,0 @@ -nox -gcp-docuploader diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index a9360a25..00000000 --- a/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,297 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.3 \ - --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ - --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 - # via nox -cachetools==5.5.0 
\ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db - # via requests -charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - 
--hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - 
--hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 - # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a - # via gcp-docuploader -colorlog==6.9.0 \ - --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ - --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via - # gcp-docuploader - # nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - 
--hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.66.0 \ - --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 - # via google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - 
--hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 - # via - # gcp-docuploader - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # google-api-core - # google-cloud-storage -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via gcp-docuploader -tomli==2.2.1 \ - --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - 
--hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 - # via nox -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d - # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via nox diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg deleted file mode 100644 index b53153fe..00000000 --- a/.kokoro/docs/common.cfg +++ /dev/null @@ -1,86 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-error-reporting/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-error-reporting/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. 
-env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "error-reporting" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### - diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 6d7b5186..00000000 --- a/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-error-reporting/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d..00000000 --- a/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh deleted file mode 100755 index 4ed4aaf1..00000000 --- a/.kokoro/publish-docs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# build docs -nox -s docs - -# create metadata -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. 
-python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg deleted file mode 100644 index e0de520f..00000000 --- a/.kokoro/release/common.cfg +++ /dev/null @@ -1,68 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-error-reporting/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-error-reporting/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-2" - } - } -} - -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. 
-action { - define_artifacts { - regex: "github/python-error-reporting/**/*.tar.gz" - strip_prefix: "github/python-error-reporting" - } -} - - -############################################# -# this section merged from .kokoro/common_env_vars.cfg using owlbot.py - -env_vars: { - key: "PRODUCT_AREA_LABEL" - value: "observability" -} -env_vars: { - key: "PRODUCT_LABEL" - value: "error-reporting" -} -env_vars: { - key: "LANGUAGE_LABEL" - value: "python" -} - -################################################### diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d..00000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in deleted file mode 100644 index fff4d9ce..00000000 --- a/.kokoro/requirements.in +++ /dev/null @@ -1,11 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -cryptography>=42.0.5 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt deleted file mode 100644 index 9622baf0..00000000 --- a/.kokoro/requirements.txt +++ /dev/null @@ -1,537 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f - # via nox -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 - # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 - # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - 
--hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - 
--hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==42.0.8 \ - --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ - --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ - --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ - --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ - --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ - --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ - --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ - --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ - --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ - --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ - --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ - --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ - --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ - --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ - --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ - --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ - --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ - --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ - --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ - --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ - --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ - --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ - --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ - --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ - --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ - --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ - --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ - --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ - --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ - --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ - --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ - --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 - # via virtualenv 
-docutils==0.21.2 \ - --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.0.1 \ - --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ - --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 - # via -r requirements.in -google-api-core==2.19.1 \ - --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ - --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.31.0 \ - --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ - --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.17.0 \ - --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ - --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 - # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - 
--hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - 
--hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.1 \ - --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ - --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 - # via google-cloud-storage -googleapis-common-protos==1.63.2 \ - --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ - --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 - # via google-api-core -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 - # via requests -importlib-metadata==8.0.0 \ - --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ - --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==5.3.0 \ - --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ - --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 - # via keyring -jaraco-functools==4.0.1 \ - --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ - --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via gcp-releasetool -keyring==25.2.1 \ - --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ - --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - 
--hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - 
--hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.3.0 \ - --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ - --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via readme-renderer -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 - # via 
virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.27.2 \ - --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ - --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ - --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ - --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ - --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ - --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ - --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ - --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ - --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ - --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ - --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine 
-rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 - # via - # requests - # twine -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 - # via nox -wheel==0.43.0 \ - --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 - # via -r requirements.in -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==70.2.0 \ - --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ - --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 - # via -r requirements.in diff --git a/owlbot.py b/owlbot.py index 47e52719..0f851388 100644 --- a/owlbot.py +++ b/owlbot.py @@ -79,7 +79,7 @@ # -------------------------------------------------------------------------- # add shared environment variables to test configs -tracked_subdirs = ["continuous", "presubmit", "samples", "docs"] +tracked_subdirs = ["continuous", "presubmit", "samples"] for subdir in tracked_subdirs: for path, subdirs, files in os.walk(f".kokoro/{subdir}"): for name in files: From ce1ecd164277b9318c4ae4ab5542a063acc8e151 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 10:47:10 -0400 Subject: [PATCH 103/111] chore: Delete .kokoro/release.sh (#559) --- .kokoro/release.sh | 29 ----------------------------- 1 file changed, 29 deletions(-) delete mode 100755 .kokoro/release.sh diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index 0b483792..00000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 
2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-error-reporting/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") -cd github/python-error-reporting -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* From 9f8faeba223a0e1834c0750d21b5cafdee74d327 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 17 Mar 2025 11:05:22 -0400 Subject: [PATCH 104/111] fix: Allow Protobuf 6.x (#557) Co-authored-by: ohmayr --- setup.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index d3f038a7..bed18b8b 100644 --- a/setup.py +++ b/setup.py @@ -38,14 +38,14 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-cloud-logging>=1.14.0, <4.0.0dev", + "google-cloud-logging>=1.14.0, <4.0.0", # Exclude incompatible versions of `google-auth` # See https://github.com/googleapis/google-cloud-python/issues/12364 - "google-auth >= 2.14.1, <3.0.0dev,!=2.24.0,!=2.25.0", - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "google-auth >= 2.14.1, <3.0.0,!=2.24.0,!=2.25.0", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", ] url = "https://github.com/googleapis/python-error-reporting" From a79c0912d1eb426992a6db6fbd5231f6d693c268 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 09:35:23 -0400 Subject: [PATCH 105/111] chore: Update gapic-generator-python to 1.23.6 (#563) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.23.6 PiperOrigin-RevId: 738170370 Source-Link: https://github.com/googleapis/googleapis/commit/3f1e17aa2dec3f146a9a2a8a64c5c6d19d0b6e15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/9afd8c33d4cae610b75fa4999264ea8c8c66b9d2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOWFmZDhjMzNkNGNhZTYxMGI3NWZhNDk5OTI2NGVhOGM4YzY2YjlkMiJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- google/cloud/errorreporting_v1beta1/__init__.py | 2 +- google/cloud/errorreporting_v1beta1/services/__init__.py | 2 +- .../services/error_group_service/__init__.py | 2 +- .../services/error_group_service/async_client.py | 2 +- .../services/error_group_service/client.py | 2 +- .../services/error_group_service/transports/__init__.py | 2 +- .../services/error_group_service/transports/base.py | 2 +- .../services/error_group_service/transports/grpc.py | 2 +- .../services/error_group_service/transports/grpc_asyncio.py | 2 +- .../services/error_group_service/transports/rest.py | 2 +- .../services/error_group_service/transports/rest_base.py | 2 +- .../services/error_stats_service/__init__.py | 2 +- .../services/error_stats_service/async_client.py | 2 +- .../services/error_stats_service/client.py | 2 +- .../services/error_stats_service/pagers.py | 2 +- .../services/error_stats_service/transports/__init__.py | 2 +- .../services/error_stats_service/transports/base.py | 2 +- .../services/error_stats_service/transports/grpc.py | 2 +- .../services/error_stats_service/transports/grpc_asyncio.py | 2 +- .../services/error_stats_service/transports/rest.py | 2 +- .../services/error_stats_service/transports/rest_base.py | 2 +- .../services/report_errors_service/__init__.py | 2 +- .../services/report_errors_service/async_client.py | 2 +- .../services/report_errors_service/client.py | 2 +- .../services/report_errors_service/transports/__init__.py | 2 +- .../services/report_errors_service/transports/base.py | 2 +- .../services/report_errors_service/transports/grpc.py | 2 +- .../services/report_errors_service/transports/grpc_asyncio.py | 2 +- .../services/report_errors_service/transports/rest.py | 2 +- .../services/report_errors_service/transports/rest_base.py | 2 +- google/cloud/errorreporting_v1beta1/types/__init__.py | 2 +- google/cloud/errorreporting_v1beta1/types/common.py | 2 +- .../cloud/errorreporting_v1beta1/types/error_group_service.py | 2 +- .../cloud/errorreporting_v1beta1/types/error_stats_service.py | 2 +- .../cloud/errorreporting_v1beta1/types/report_errors_service.py | 2 +- ...ing_v1beta1_generated_error_group_service_get_group_async.py | 2 +- ...ting_v1beta1_generated_error_group_service_get_group_sync.py | 2 +- ..._v1beta1_generated_error_group_service_update_group_async.py | 2 +- ...g_v1beta1_generated_error_group_service_update_group_sync.py | 2 +- ...v1beta1_generated_error_stats_service_delete_events_async.py | 2 +- ..._v1beta1_generated_error_stats_service_delete_events_sync.py | 2 +- ...g_v1beta1_generated_error_stats_service_list_events_async.py | 2 +- ...ng_v1beta1_generated_error_stats_service_list_events_sync.py | 2 +- ...eta1_generated_error_stats_service_list_group_stats_async.py | 2 +- ...beta1_generated_error_stats_service_list_group_stats_sync.py | 2 +- ..._generated_report_errors_service_report_error_event_async.py | 2 +- ...1_generated_report_errors_service_report_error_event_sync.py | 2 +- scripts/fixup_errorreporting_v1beta1_keywords.py | 2 +- tests/__init__.py | 2 +- tests/unit/__init__.py | 2 +- tests/unit/gapic/__init__.py | 2 +- tests/unit/gapic/errorreporting_v1beta1/__init__.py | 2 +- .../gapic/errorreporting_v1beta1/test_error_group_service.py | 2 +- .../gapic/errorreporting_v1beta1/test_error_stats_service.py | 2 +- .../gapic/errorreporting_v1beta1/test_report_errors_service.py | 2 +- 55 files changed, 55 insertions(+), 55 deletions(-) diff 
--git a/google/cloud/errorreporting_v1beta1/__init__.py b/google/cloud/errorreporting_v1beta1/__init__.py index f2c069f9..e6ff221d 100644 --- a/google/cloud/errorreporting_v1beta1/__init__.py +++ b/google/cloud/errorreporting_v1beta1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/__init__.py b/google/cloud/errorreporting_v1beta1/services/__init__.py index 8f6cf068..cbf94b28 100644 --- a/google/cloud/errorreporting_v1beta1/services/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py index 7adebfc3..1e8d42ee 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py index c4c8feae..d93e9a46 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py index d4b2a29b..f84c28ab 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py index ee3446ba..f02ba22b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py index 52b03cea..0fec2890 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index 3d24d3eb..cb308ce0 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py index 24e5f0eb..f3186e61 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py index 80bba4e5..078a817d 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py index 0668d9ac..f4330f97 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py index a31a509a..e04db91b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py index 3b804ae4..389c161b 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py index 6f67b822..84a75273 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py index 277a4112..190fecd9 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py index 69c4f43c..8f452e45 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py index 2ca8391c..bb733a49 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index 08a27874..fd240316 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py index 7085b89b..a6211645 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py index 11a6cdd9..e3c053d8 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py index cd1c2a0f..77269984 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py index d9e40c58..2e3c1322 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py index 76e85eed..e58ed221 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py index aa9f01e8..31c8163d 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py index bf8256bc..83aa46c6 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py index 9bb43400..b22d3ac0 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index e9cf45f3..9f5f302e 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py index 609b0be2..18f942ce 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py index b6120041..682387e2 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py index 61e2fbf1..f6495841 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/__init__.py b/google/cloud/errorreporting_v1beta1/types/__init__.py index 42ab63d4..4acdb9a1 100644 --- a/google/cloud/errorreporting_v1beta1/types/__init__.py +++ b/google/cloud/errorreporting_v1beta1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/errorreporting_v1beta1/types/common.py b/google/cloud/errorreporting_v1beta1/types/common.py index 1278fa58..84320a89 100644 --- a/google/cloud/errorreporting_v1beta1/types/common.py +++ b/google/cloud/errorreporting_v1beta1/types/common.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/error_group_service.py b/google/cloud/errorreporting_v1beta1/types/error_group_service.py index cfd82fa7..be126768 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_group_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_group_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py index 93158ab5..0285d9a8 100644 --- a/google/cloud/errorreporting_v1beta1/types/error_stats_service.py +++ b/google/cloud/errorreporting_v1beta1/types/error_stats_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py index 87c1b14d..ebd33856 100644 --- a/google/cloud/errorreporting_v1beta1/types/report_errors_service.py +++ b/google/cloud/errorreporting_v1beta1/types/report_errors_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py index dc84897a..4a0dabf5 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py index e260be1f..79c76d18 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_get_group_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py index 62a82368..58a7d5b3 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py index 7239a97f..9fa53da6 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_group_service_update_group_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py index 4bf09138..6479d386 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py index 6194de23..60dcebe9 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_delete_events_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py index 49f25ac7..3b143dca 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py index 99d4ca32..273d4a44 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_events_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py index 8ffdf643..92ef3ed1 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py index a696194b..ed6b7f5a 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_error_stats_service_list_group_stats_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py index 5dc4e264..b2c9d42e 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py index f75f5591..b413c1f6 100644 --- a/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py +++ b/samples/generated_samples/clouderrorreporting_v1beta1_generated_report_errors_service_report_error_event_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/fixup_errorreporting_v1beta1_keywords.py b/scripts/fixup_errorreporting_v1beta1_keywords.py index 69a5a229..60f94ffc 100644 --- a/scripts/fixup_errorreporting_v1beta1_keywords.py +++ b/scripts/fixup_errorreporting_v1beta1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/__init__.py b/tests/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/__init__.py b/tests/unit/gapic/errorreporting_v1beta1/__init__.py index 8f6cf068..cbf94b28 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/__init__.py +++ b/tests/unit/gapic/errorreporting_v1beta1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py index cacaa620..67adff43 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_group_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py index 437f9b4a..0b1bdc25 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_error_stats_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py index b8ad46b1..9ac7c004 100644 --- a/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py +++ b/tests/unit/gapic/errorreporting_v1beta1/test_report_errors_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 844a554b122defaa243535c3725bb70f94e379a8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 31 Mar 2025 14:14:43 -0400 Subject: [PATCH 106/111] chore(python): fix incorrect import statement in README (#564) Source-Link: https://github.com/googleapis/synthtool/commit/87677404f85cee860588ebe2c352d0609f683d5d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 4 ++-- README.rst | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index c631e1f7..c4e82889 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 -# created: 2025-03-05 + digest: sha256:023a21377a2a00008057f99f0118edadc30a19d1636a3fee47189ebec2f3921c +# created: 2025-03-31T16:51:40.130756953Z diff --git a/README.rst b/README.rst index 484923c1..a1b9e25e 100644 --- a/README.rst +++ b/README.rst @@ -162,7 +162,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google") base_logger.addHandler(logging.StreamHandler()) @@ -174,7 +174,7 @@ Code-Based Examples import logging - from google.cloud.translate_v3 import translate + from google.cloud import library_v1 base_logger = logging.getLogger("google.cloud.library_v1") base_logger.addHandler(logging.StreamHandler()) From 0738c03fa4321fd29c0915da2336bf77947367ae Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 16 Apr 2025 12:47:27 -0400 Subject: [PATCH 107/111] fix: remove setup.cfg configuration for creating universal wheels (#562) --- setup.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 05235008..00000000 --- a/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! 
-[bdist_wheel] -universal = 1 From 3fdadbe34dbcd25d6e8015d1d519b994f9da0ac6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 16 Apr 2025 10:07:05 -0700 Subject: [PATCH 108/111] chore: Update gapic-generator-python to 1.24.0 (#565) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to 1.24.0 PiperOrigin-RevId: 747419463 Source-Link: https://github.com/googleapis/googleapis/commit/340579bf7f97ba56cda0c70176dc5b03a8357667 Source-Link: https://github.com/googleapis/googleapis-gen/commit/e8997ec5136ecb6ed9a969a4c2f13b3ab6a17c12 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZTg5OTdlYzUxMzZlY2I2ZWQ5YTk2OWE0YzJmMTNiM2FiNmExN2MxMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../error_group_service/transports/grpc.py | 3 +-- .../error_stats_service/transports/grpc.py | 3 +-- .../report_errors_service/transports/grpc.py | 3 +-- testing/constraints-3.13.txt | 15 ++++++++++----- 4 files changed, 13 insertions(+), 11 deletions(-) diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py index cb308ce0..8bfab8f0 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/grpc.py @@ -71,12 +71,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorGroupService", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py index fd240316..9bcb9106 100644 --- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/grpc.py @@ -70,12 +70,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.devtools.clouderrorreporting.v1beta1.ErrorStatsService", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py index 9f5f302e..023d0f0f 100644 --- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py +++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/grpc.py @@ -70,12 +70,11 @@ def 
intercept_unary_unary(self, continuation, client_call_details, request):
                 f"Sending request for {client_call_details.method}",
                 extra={
                     "serviceName": "google.devtools.clouderrorreporting.v1beta1.ReportErrorsService",
-                    "rpcName": client_call_details.method,
+                    "rpcName": str(client_call_details.method),
                     "request": grpc_request,
                     "metadata": grpc_request["metadata"],
                 },
             )
-
         response = continuation(client_call_details, request)
         if logging_enabled:  # pragma: NO COVER
             response_metadata = response.trailing_metadata()
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
index ed7f9aed..c20a7781 100644
--- a/testing/constraints-3.13.txt
+++ b/testing/constraints-3.13.txt
@@ -1,6 +1,11 @@
-# -*- coding: utf-8 -*-
-# This constraints file is required for unit tests.
+# We use the constraints file for the latest Python version
+# (currently this file) to check that the latest
+# major versions of dependencies are supported in setup.py.
 # List all library dependencies and extras in this file.
-google-api-core
-proto-plus
-protobuf
+# Require the latest major version be installed for each dependency.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# Then this file should have google-cloud-foo>=1
+google-api-core>=2
+google-auth>=2
+proto-plus>=1
+protobuf>=6

From d5cd225fd71d8b54197c4f02c30d32eb8cd24dfa Mon Sep 17 00:00:00 2001
From: Kevin Zheng <147537668+gkevinzheng@users.noreply.github.com>
Date: Mon, 28 Apr 2025 15:45:42 -0400
Subject: [PATCH 109/111] fix: require proto-plus >= 1.25.0 for Python 3.13 (#567)

---
 setup.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup.py b/setup.py
index bed18b8b..530e3c7f 100644
--- a/setup.py
+++ b/setup.py
@@ -45,6 +45,7 @@
     "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
     "proto-plus >= 1.22.0, <2.0.0",
     "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'",
+    "proto-plus >= 1.25.0, <2.0.0dev; python_version>='3.13'",
     "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
 ]
 url = "https://github.com/googleapis/python-error-reporting"

From 8f3140819bc6dc26d3c62da0f6f50abf749047cd Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Wed, 21 May 2025 06:42:36 -0400
Subject: [PATCH 110/111] chore: Update gapic-generator-python to 1.25.0 (#568)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore: Update gapic-generator-python to 1.25.0

PiperOrigin-RevId: 755914147

Source-Link: https://github.com/googleapis/googleapis/commit/97a83d76a09a7f6dcab43675c87bdfeb5bcf1cb5
Source-Link: https://github.com/googleapis/googleapis-gen/commit/a9977efedc836ccece1f01d529b0315e1efe52ad
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTk5NzdlZmVkYzgzNmNjZWNlMWYwMWQ1MjliMDMxNWUxZWZlNTJhZCJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot
---
 .../services/error_group_service/async_client.py | 4 ++++
 .../services/error_group_service/client.py | 3 +++
 .../services/error_group_service/transports/base.py | 4 ++++
 .../services/error_group_service/transports/rest.py | 4 ++++
 .../services/error_stats_service/async_client.py | 4 ++++
 .../services/error_stats_service/client.py | 3 +++
 .../services/error_stats_service/transports/base.py | 4 ++++
 .../services/error_stats_service/transports/rest.py | 4 ++++
 .../services/report_errors_service/async_client.py | 4 ++++
 .../services/report_errors_service/client.py | 3 +++
 .../services/report_errors_service/transports/base.py | 4 ++++
 .../services/report_errors_service/transports/rest.py | 4 ++++
 12 files changed, 45 insertions(+)

diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py
index d93e9a46..3e9cf737 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/async_client.py
@@ -37,6 +37,7 @@
 from google.api_core import retry_async as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf


 try:
@@ -547,5 +548,8 @@ async def __aexit__(self, exc_type, exc, tb):
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 __all__ = ("ErrorGroupServiceAsyncClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py
index f84c28ab..cc4b15df 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/client.py
@@ -45,6 +45,7 @@
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf

 try:
     OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -964,5 +965,7 @@ def __exit__(self, type, value, traceback):
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__

 __all__ = ("ErrorGroupServiceClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py
index 0fec2890..9938eaf4 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/base.py
@@ -25,6 +25,7 @@
 from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf

 from google.cloud.errorreporting_v1beta1.types import common
 from google.cloud.errorreporting_v1beta1.types import error_group_service
@@ -33,6 +34,9 @@
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 class ErrorGroupServiceTransport(abc.ABC):
     """Abstract transport class for ErrorGroupService."""
diff --git a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py
index 078a817d..526f1d7f 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_group_service/transports/rest.py
@@ -23,6 +23,7 @@
 from google.api_core import rest_helpers
 from google.api_core import rest_streaming
 from google.api_core import gapic_v1
+import google.protobuf

 from google.protobuf import json_format

@@ -59,6 +60,9 @@
     rest_version=f"requests@{requests_version}",
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 class ErrorGroupServiceRestInterceptor:
     """Interceptor for ErrorGroupService.
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py
index 389c161b..f588025e 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/async_client.py
@@ -37,6 +37,7 @@
 from google.api_core import retry_async as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf


 try:
@@ -753,5 +754,8 @@ async def __aexit__(self, exc_type, exc, tb):
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 __all__ = ("ErrorStatsServiceAsyncClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py
index 84a75273..dd22c33b 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/client.py
@@ -45,6 +45,7 @@
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf

 try:
     OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -1167,5 +1168,7 @@ def __exit__(self, type, value, traceback):
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__

 __all__ = ("ErrorStatsServiceClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py
index bb733a49..12f6992a 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/base.py
@@ -25,6 +25,7 @@
 from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf

 from google.cloud.errorreporting_v1beta1.types import error_stats_service

@@ -32,6 +33,9 @@
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 class ErrorStatsServiceTransport(abc.ABC):
     """Abstract transport class for ErrorStatsService."""
diff --git a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py
index e3c053d8..1b7a4cc5 100644
--- a/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py
+++ b/google/cloud/errorreporting_v1beta1/services/error_stats_service/transports/rest.py
@@ -23,6 +23,7 @@
 from google.api_core import rest_helpers
 from google.api_core import rest_streaming
 from google.api_core import gapic_v1
+import google.protobuf

 from google.protobuf import json_format

@@ -58,6 +59,9 @@
     rest_version=f"requests@{requests_version}",
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 class ErrorStatsServiceRestInterceptor:
     """Interceptor for ErrorStatsService.
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py
index e58ed221..33c90024 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/async_client.py
@@ -37,6 +37,7 @@
 from google.api_core import retry_async as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf


 try:
@@ -453,5 +454,8 @@ async def __aexit__(self, exc_type, exc, tb):
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 __all__ = ("ReportErrorsServiceAsyncClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py
index 31c8163d..9dc31d29 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/client.py
@@ -45,6 +45,7 @@
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf

 try:
     OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
@@ -860,5 +861,7 @@ def __exit__(self, type, value, traceback):
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__

 __all__ = ("ReportErrorsServiceClient",)
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py
index b22d3ac0..31f69093 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/base.py
@@ -25,6 +25,7 @@
 from google.api_core import retry as retries
 from google.auth import credentials as ga_credentials  # type: ignore
 from google.oauth2 import service_account  # type: ignore
+import google.protobuf

 from google.cloud.errorreporting_v1beta1.types import report_errors_service

@@ -32,6 +33,9 @@
     gapic_version=package_version.__version__
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 class ReportErrorsServiceTransport(abc.ABC):
     """Abstract transport class for ReportErrorsService."""
diff --git a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py
index 682387e2..98c63125 100644
--- a/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py
+++ b/google/cloud/errorreporting_v1beta1/services/report_errors_service/transports/rest.py
@@ -23,6 +23,7 @@
 from google.api_core import rest_helpers
 from google.api_core import rest_streaming
 from google.api_core import gapic_v1
+import google.protobuf

 from google.protobuf import json_format

@@ -58,6 +59,9 @@
     rest_version=f"requests@{requests_version}",
 )

+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+

 class ReportErrorsServiceRestInterceptor:
     """Interceptor for ReportErrorsService.

From 92b85ce05a600c731e8e192436aaf99c77366649 Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Mon, 23 Jun 2025 13:25:06 -0400
Subject: [PATCH 111/111] chore(main): release 1.12.0 (#553)

Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
---
 .release-please-manifest.json | 2 +-
 CHANGELOG.md | 16 ++++++++++++++++
 google/cloud/error_reporting/gapic_version.py | 2 +-
 .../errorreporting_v1beta1/gapic_version.py | 2 +-
 ...gle.devtools.clouderrorreporting.v1beta1.json | 2 +-
 5 files changed, 20 insertions(+), 4 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index c42d93fc..b3960c05 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.11.1"
+  ".": "1.12.0"
 }
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d3385b60..a09f8129 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,22 @@

 [1]: https://pypi.org/project/google-cloud-error-reporting/#history

+## [1.12.0](https://github.com/googleapis/python-error-reporting/compare/v1.11.1...v1.12.0) (2025-05-21)
+
+
+### Features
+
+* Add REST Interceptors which support reading metadata ([1d120c7](https://github.com/googleapis/python-error-reporting/commit/1d120c77c73b566796b32cab017a0d4cdfa28713))
+* Add support for opt-in debug logging ([1d120c7](https://github.com/googleapis/python-error-reporting/commit/1d120c77c73b566796b32cab017a0d4cdfa28713))
+
+
+### Bug Fixes
+
+* Allow Protobuf 6.x ([#557](https://github.com/googleapis/python-error-reporting/issues/557)) ([9f8faeb](https://github.com/googleapis/python-error-reporting/commit/9f8faeba223a0e1834c0750d21b5cafdee74d327))
+* Fix typing issue with gRPC metadata when key ends in -bin ([1d120c7](https://github.com/googleapis/python-error-reporting/commit/1d120c77c73b566796b32cab017a0d4cdfa28713))
+* Remove setup.cfg configuration for creating universal wheels ([#562](https://github.com/googleapis/python-error-reporting/issues/562)) ([0738c03](https://github.com/googleapis/python-error-reporting/commit/0738c03fa4321fd29c0915da2336bf77947367ae))
+* Require proto-plus >= 1.25.0 for Python 3.13 ([#567](https://github.com/googleapis/python-error-reporting/issues/567)) ([d5cd225](https://github.com/googleapis/python-error-reporting/commit/d5cd225fd71d8b54197c4f02c30d32eb8cd24dfa))
+
 ## [1.11.1](https://github.com/googleapis/python-error-reporting/compare/v1.11.0...v1.11.1) (2024-09-17)


diff --git a/google/cloud/error_reporting/gapic_version.py b/google/cloud/error_reporting/gapic_version.py
index a034d27a..7138f214 100644
--- a/google/cloud/error_reporting/gapic_version.py
+++ b/google/cloud/error_reporting/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "1.11.1"  # {x-release-please-version}
+__version__ = "1.12.0"  # {x-release-please-version}
diff --git a/google/cloud/errorreporting_v1beta1/gapic_version.py b/google/cloud/errorreporting_v1beta1/gapic_version.py
index a034d27a..7138f214 100644
--- a/google/cloud/errorreporting_v1beta1/gapic_version.py
+++ b/google/cloud/errorreporting_v1beta1/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "1.11.1"  # {x-release-please-version}
+__version__ = "1.12.0"  # {x-release-please-version}
diff --git a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json
index f3b984e5..226f3879 100644
--- a/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json
+++ b/samples/generated_samples/snippet_metadata_google.devtools.clouderrorreporting.v1beta1.json
@@ -8,7 +8,7 @@
         ],
         "language": "PYTHON",
         "name": "google-cloud-error-reporting",
-        "version": "0.1.0"
+        "version": "1.12.0"
     },
     "snippets": [
         {
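The hasattr() guard that patch 110 stamps into every generated module exists because ClientInfo only gained a protobuf_runtime_version field in relatively recent google-api-core releases; on older installs the attribute is simply absent. Below is a minimal sketch of the same pattern outside the generated code; the `info` variable and the "1.12.0" version string are illustrative only and not taken from this repository.

# Illustrative sketch only; assumes google-api-core is new enough to define
# ClientInfo.protobuf_runtime_version (older releases lack it, which is why
# the generated modules wrap the assignment in hasattr()).
from google.api_core.gapic_v1 import client_info
import google.protobuf

# Build the same kind of metadata object the generated clients construct.
info = client_info.ClientInfo(gapic_version="1.12.0")  # hypothetical version

# Only record the protobuf runtime version if this api-core release knows
# about the field.
if hasattr(info, "protobuf_runtime_version"):
    info.protobuf_runtime_version = google.protobuf.__version__

# The populated fields are folded into the user-agent string that the client
# sends as request metadata (the x-goog-api-client header).
print(info.to_user_agent())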