diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 597e0c32..51b21a62 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,4 +1,4 @@
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,5 +13,5 @@
# limitations under the License.
docker:
image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455
-# created: 2024-09-16T21:04:09.091105552Z
+ digest: sha256:a7aef70df5f13313ddc027409fc8f3151422ec2a57ac8730fce8fa75c060d5bb
+# created: 2025-04-10T17:00:10.042601326Z
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
index d4ca9418..50e8bd30 100644
--- a/.github/release-trigger.yml
+++ b/.github/release-trigger.yml
@@ -1 +1,2 @@
enabled: true
+multiScmName: python-api-core
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index a19b27a7..b724bada 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -17,15 +17,37 @@ branchProtectionRules:
- 'unit_grpc_gcp-3.10'
- 'unit_grpc_gcp-3.11'
- 'unit_grpc_gcp-3.12'
+ - 'unit_grpc_gcp-3.13'
+ - 'unit_grpc_gcp-3.14'
- 'unit-3.7'
- 'unit-3.8'
- 'unit-3.9'
- 'unit-3.10'
- 'unit-3.11'
- 'unit-3.12'
+ - 'unit-3.13'
+ - 'unit-3.14'
- 'unit_wo_grpc-3.10'
- 'unit_wo_grpc-3.11'
- 'unit_wo_grpc-3.12'
+ - 'unit_wo_grpc-3.13'
+ - 'unit_wo_grpc-3.14'
+ - 'unit_w_prerelease_deps-3.7'
+ - 'unit_w_prerelease_deps-3.8'
+ - 'unit_w_prerelease_deps-3.9'
+ - 'unit_w_prerelease_deps-3.10'
+ - 'unit_w_prerelease_deps-3.11'
+ - 'unit_w_prerelease_deps-3.12'
+ - 'unit_w_prerelease_deps-3.13'
+ - 'unit_w_prerelease_deps-3.14'
+ - 'unit_w_async_rest_extra-3.7'
+ - 'unit_w_async_rest_extra-3.8'
+ - 'unit_w_async_rest_extra-3.9'
+ - 'unit_w_async_rest_extra-3.10'
+ - 'unit_w_async_rest_extra-3.11'
+ - 'unit_w_async_rest_extra-3.12'
+ - 'unit_w_async_rest_extra-3.13'
+ - 'unit_w_async_rest_extra-3.14'
- 'cover'
- 'docs'
- 'docfx'
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 698fbc5c..2833fe98 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -12,7 +12,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v5
with:
- python-version: "3.9"
+ python-version: "3.10"
- name: Install nox
run: |
python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index 2d1193d6..f260a6a5 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -8,7 +8,10 @@ on:
jobs:
run-unittests:
name: unit${{ matrix.option }}-${{ matrix.python }}
- runs-on: ubuntu-latest
+ # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+ # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
+ runs-on: ubuntu-22.04
strategy:
matrix:
option: ["", "_grpc_gcp", "_wo_grpc", "_w_prerelease_deps", "_w_async_rest_extra"]
@@ -19,6 +22,8 @@ jobs:
- "3.10"
- "3.11"
- "3.12"
+ - "3.13"
+ - "3.14"
exclude:
- option: "_wo_grpc"
python: 3.7
@@ -33,6 +38,7 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}
+ allow-prereleases: true
- name: Install nox
run: |
python -m pip install --upgrade setuptools pip wheel
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index f05e867c..d41b45aa 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,11 +15,13 @@
set -eo pipefail
+CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
if [[ -z "${PROJECT_ROOT:-}" ]]; then
- PROJECT_ROOT="github/python-api-core"
+ PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
fi
-cd "${PROJECT_ROOT}"
+pushd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Setup service account credentials.
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
+then
+ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+fi
# Setup project id.
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
+then
+ export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+fi
# If this is a continuous build, send the test log to the FlakyBot.
# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
@@ -46,7 +54,7 @@ fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3 -m nox -s ${NOX_SESSION:-}
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
deleted file mode 100644
index e5410e29..00000000
--- a/.kokoro/docker/docs/Dockerfile
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ubuntu:24.04
-
-ENV DEBIAN_FRONTEND noninteractive
-
-# Ensure local Python is preferred over distribution Python.
-ENV PATH /usr/local/bin:$PATH
-
-# Install dependencies.
-RUN apt-get update \
- && apt-get install -y --no-install-recommends \
- apt-transport-https \
- build-essential \
- ca-certificates \
- curl \
- dirmngr \
- git \
- gpg-agent \
- graphviz \
- libbz2-dev \
- libdb5.3-dev \
- libexpat1-dev \
- libffi-dev \
- liblzma-dev \
- libreadline-dev \
- libsnappy-dev \
- libssl-dev \
- libsqlite3-dev \
- portaudio19-dev \
- redis-server \
- software-properties-common \
- ssh \
- sudo \
- tcl \
- tcl-dev \
- tk \
- tk-dev \
- uuid-dev \
- wget \
- zlib1g-dev \
- && add-apt-repository universe \
- && apt-get update \
- && apt-get -y install jq \
- && apt-get clean autoclean \
- && apt-get autoremove -y \
- && rm -rf /var/lib/apt/lists/* \
- && rm -f /var/cache/apt/archives/*.deb
-
-
-###################### Install python 3.10.14 for docs/docfx session
-
-# Download python 3.10.14
-RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz
-
-# Extract files
-RUN tar -xvf Python-3.10.14.tgz
-
-# Install python 3.10.14
-RUN ./Python-3.10.14/configure --enable-optimizations
-RUN make altinstall
-
-ENV PATH /usr/local/bin/python3.10:$PATH
-
-###################### Install pip
-RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
- && python3.10 /tmp/get-pip.py \
- && rm /tmp/get-pip.py
-
-# Test pip
-RUN python3.10 -m pip
-
-# Install build requirements
-COPY requirements.txt /requirements.txt
-RUN python3.10 -m pip install --require-hashes -r requirements.txt
-
-CMD ["python3.10"]
diff --git a/.kokoro/docker/docs/requirements.in b/.kokoro/docker/docs/requirements.in
deleted file mode 100644
index 816817c6..00000000
--- a/.kokoro/docker/docs/requirements.in
+++ /dev/null
@@ -1 +0,0 @@
-nox
diff --git a/.kokoro/docker/docs/requirements.txt b/.kokoro/docker/docs/requirements.txt
deleted file mode 100644
index 7129c771..00000000
--- a/.kokoro/docker/docs/requirements.txt
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.9
-# by the following command:
-#
-# pip-compile --allow-unsafe --generate-hashes requirements.in
-#
-argcomplete==3.4.0 \
- --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
- --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
- # via nox
-colorlog==6.8.2 \
- --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
- --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
- # via nox
-distlib==0.3.8 \
- --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
- --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
- # via virtualenv
-filelock==3.15.4 \
- --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
- --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
- # via virtualenv
-nox==2024.4.15 \
- --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
- --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
- # via -r requirements.in
-packaging==24.1 \
- --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
- --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
- # via nox
-platformdirs==4.2.2 \
- --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
- --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
- # via virtualenv
-tomli==2.0.1 \
- --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
- --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
- # via nox
-virtualenv==20.26.3 \
- --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
- --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
- # via nox
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
deleted file mode 100644
index 48e89855..00000000
--- a/.kokoro/docs/common.cfg
+++ /dev/null
@@ -1,67 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/publish-docs.sh"
-}
-
-env_vars: {
- key: "STAGING_BUCKET"
- value: "docs-staging"
-}
-
-env_vars: {
- key: "V2_STAGING_BUCKET"
- # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
- # Cloud RAD bucket `docs-staging-v2`
- value: "docs-staging-v2-staging"
-}
-
-# It will upload the docker image after successful builds.
-env_vars: {
- key: "TRAMPOLINE_IMAGE_UPLOAD"
- value: "true"
-}
-
-# It will always build the docker image.
-env_vars: {
- key: "TRAMPOLINE_DOCKERFILE"
- value: ".kokoro/docker/docs/Dockerfile"
-}
-
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "docuploader_service_account"
- }
- }
-}
\ No newline at end of file
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
deleted file mode 100644
index d1ed51eb..00000000
--- a/.kokoro/docs/docs-presubmit.cfg
+++ /dev/null
@@ -1,28 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
- key: "STAGING_BUCKET"
- value: "gcloud-python-test"
-}
-
-env_vars: {
- key: "V2_STAGING_BUCKET"
- value: "gcloud-python-test"
-}
-
-# We only upload the image in the main `docs` build.
-env_vars: {
- key: "TRAMPOLINE_IMAGE_UPLOAD"
- value: "false"
-}
-
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/build.sh"
-}
-
-# Only run this nox session.
-env_vars: {
- key: "NOX_SESSION"
- value: "docs docfx"
-}
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
deleted file mode 100644
index 8f43917d..00000000
--- a/.kokoro/docs/docs.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
deleted file mode 100755
index 233205d5..00000000
--- a/.kokoro/publish-docs.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-export PATH="${HOME}/.local/bin:${PATH}"
-
-# Install nox
-python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt
-python3.10 -m nox --version
-
-# build docs
-nox -s docs
-
-# create metadata
-python3.10 -m docuploader create-metadata \
- --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3.10 setup.py --version) \
- --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3.10 setup.py --name) \
- --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
- --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
- --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
-
-
-# docfx yaml files
-nox -s docfx
-
-# create metadata.
-python3.10 -m docuploader create-metadata \
- --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
- --version=$(python3.10 setup.py --version) \
- --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
- --distribution-name=$(python3.10 setup.py --name) \
- --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
- --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
- --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
deleted file mode 100755
index dd1331c6..00000000
--- a/.kokoro/release.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-# Copyright 2024 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Start the releasetool reporter
-python3 -m pip install --require-hashes -r github/python-api-core/.kokoro/requirements.txt
-python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2")
-cd github/python-api-core
-python3 setup.py sdist bdist_wheel
-twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
deleted file mode 100644
index bb8198fb..00000000
--- a/.kokoro/release/common.cfg
+++ /dev/null
@@ -1,49 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
- define_artifacts {
- regex: "**/*sponge_log.xml"
- }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
- key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
- key: "TRAMPOLINE_BUILD_FILE"
- value: "github/python-api-core/.kokoro/release.sh"
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google-cloud-pypi-token-keystore-2"
- }
- }
-}
-
-# Tokens needed to report release status back to GitHub
-env_vars: {
- key: "SECRET_MANAGER_KEYS"
- value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
-
-# Store the packages we uploaded to PyPI. That way, we have a record of exactly
-# what we published, which we can use to generate SBOMs and attestations.
-action {
- define_artifacts {
- regex: "github/python-api-core/**/*.tar.gz"
- strip_prefix: "github/python-api-core"
- }
-}
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
deleted file mode 100644
index 8f43917d..00000000
--- a/.kokoro/release/release.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in
deleted file mode 100644
index fff4d9ce..00000000
--- a/.kokoro/requirements.in
+++ /dev/null
@@ -1,11 +0,0 @@
-gcp-docuploader
-gcp-releasetool>=2 # required for compatibility with cryptography>=42.x
-importlib-metadata
-typing-extensions
-twine
-wheel
-setuptools
-nox>=2022.11.21 # required to remove dependency on py
-charset-normalizer<3
-click<8.1.0
-cryptography>=42.0.5
diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
deleted file mode 100644
index 9622baf0..00000000
--- a/.kokoro/requirements.txt
+++ /dev/null
@@ -1,537 +0,0 @@
-#
-# This file is autogenerated by pip-compile with Python 3.9
-# by the following command:
-#
-# pip-compile --allow-unsafe --generate-hashes requirements.in
-#
-argcomplete==3.4.0 \
- --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \
- --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f
- # via nox
-attrs==23.2.0 \
- --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \
- --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1
- # via gcp-releasetool
-backports-tarfile==1.2.0 \
- --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \
- --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991
- # via jaraco-context
-cachetools==5.3.3 \
- --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \
- --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105
- # via google-auth
-certifi==2024.7.4 \
- --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \
- --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90
- # via requests
-cffi==1.16.0 \
- --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \
- --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \
- --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \
- --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
- --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \
- --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \
- --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \
- --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \
- --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \
- --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \
- --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \
- --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \
- --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \
- --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \
- --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \
- --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \
- --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \
- --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \
- --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \
- --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \
- --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \
- --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \
- --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \
- --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \
- --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \
- --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
- --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \
- --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \
- --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \
- --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \
- --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \
- --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \
- --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
- --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \
- --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \
- --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \
- --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
- --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \
- --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \
- --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \
- --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
- --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \
- --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \
- --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \
- --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \
- --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \
- --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \
- --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \
- --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \
- --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \
- --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \
- --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357
- # via cryptography
-charset-normalizer==2.1.1 \
- --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \
- --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f
- # via
- # -r requirements.in
- # requests
-click==8.0.4 \
- --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \
- --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb
- # via
- # -r requirements.in
- # gcp-docuploader
- # gcp-releasetool
-colorlog==6.8.2 \
- --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \
- --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33
- # via
- # gcp-docuploader
- # nox
-cryptography==42.0.8 \
- --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \
- --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \
- --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \
- --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \
- --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \
- --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \
- --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \
- --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \
- --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \
- --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \
- --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \
- --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \
- --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \
- --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \
- --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \
- --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \
- --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \
- --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \
- --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \
- --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \
- --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \
- --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \
- --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \
- --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \
- --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \
- --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \
- --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \
- --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \
- --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \
- --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \
- --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \
- --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e
- # via
- # -r requirements.in
- # gcp-releasetool
- # secretstorage
-distlib==0.3.8 \
- --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \
- --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64
- # via virtualenv
-docutils==0.21.2 \
- --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
- --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2
- # via readme-renderer
-filelock==3.15.4 \
- --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \
- --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7
- # via virtualenv
-gcp-docuploader==0.6.5 \
- --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \
- --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea
- # via -r requirements.in
-gcp-releasetool==2.0.1 \
- --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \
- --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62
- # via -r requirements.in
-google-api-core==2.19.1 \
- --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \
- --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd
- # via
- # google-cloud-core
- # google-cloud-storage
-google-auth==2.31.0 \
- --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \
- --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871
- # via
- # gcp-releasetool
- # google-api-core
- # google-cloud-core
- # google-cloud-storage
-google-cloud-core==2.4.1 \
- --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
- --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
- # via google-cloud-storage
-google-cloud-storage==2.17.0 \
- --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \
- --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1
- # via gcp-docuploader
-google-crc32c==1.5.0 \
- --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \
- --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \
- --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \
- --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \
- --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \
- --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \
- --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \
- --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \
- --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \
- --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \
- --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \
- --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \
- --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \
- --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \
- --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \
- --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \
- --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \
- --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \
- --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \
- --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \
- --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \
- --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \
- --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \
- --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \
- --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \
- --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \
- --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \
- --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \
- --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \
- --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \
- --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \
- --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \
- --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \
- --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \
- --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \
- --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \
- --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \
- --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \
- --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \
- --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \
- --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \
- --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \
- --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \
- --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \
- --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \
- --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \
- --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \
- --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \
- --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \
- --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \
- --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \
- --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \
- --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \
- --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \
- --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \
- --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \
- --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \
- --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \
- --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \
- --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \
- --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \
- --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \
- --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \
- --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \
- --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \
- --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \
- --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \
- --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4
- # via
- # google-cloud-storage
- # google-resumable-media
-google-resumable-media==2.7.1 \
- --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \
- --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33
- # via google-cloud-storage
-googleapis-common-protos==1.63.2 \
- --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \
- --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87
- # via google-api-core
-idna==3.7 \
- --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \
- --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0
- # via requests
-importlib-metadata==8.0.0 \
- --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \
- --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812
- # via
- # -r requirements.in
- # keyring
- # twine
-jaraco-classes==3.4.0 \
- --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \
- --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790
- # via keyring
-jaraco-context==5.3.0 \
- --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \
- --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2
- # via keyring
-jaraco-functools==4.0.1 \
- --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \
- --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8
- # via keyring
-jeepney==0.8.0 \
- --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \
- --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755
- # via
- # keyring
- # secretstorage
-jinja2==3.1.4 \
- --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
- --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
- # via gcp-releasetool
-keyring==25.2.1 \
- --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \
- --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b
- # via
- # gcp-releasetool
- # twine
-markdown-it-py==3.0.0 \
- --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \
- --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb
- # via rich
-markupsafe==2.1.5 \
- --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
- --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
- --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
- --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \
- --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \
- --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \
- --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \
- --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \
- --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \
- --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \
- --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \
- --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
- --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \
- --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \
- --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \
- --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \
- --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \
- --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \
- --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
- --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \
- --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \
- --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \
- --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \
- --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \
- --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \
- --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \
- --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \
- --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \
- --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \
- --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \
- --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
- --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
- --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \
- --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
- --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \
- --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \
- --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \
- --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
- --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \
- --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \
- --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \
- --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
- --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \
- --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \
- --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \
- --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \
- --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \
- --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
- --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
- --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \
- --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \
- --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \
- --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \
- --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
- --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \
- --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \
- --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \
- --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
- --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
- --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
- # via jinja2
-mdurl==0.1.2 \
- --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \
- --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba
- # via markdown-it-py
-more-itertools==10.3.0 \
- --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \
- --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320
- # via
- # jaraco-classes
- # jaraco-functools
-nh3==0.2.18 \
- --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \
- --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \
- --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \
- --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \
- --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \
- --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \
- --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \
- --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \
- --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \
- --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \
- --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \
- --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \
- --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \
- --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \
- --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \
- --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe
- # via readme-renderer
-nox==2024.4.15 \
- --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \
- --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f
- # via -r requirements.in
-packaging==24.1 \
- --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \
- --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124
- # via
- # gcp-releasetool
- # nox
-pkginfo==1.10.0 \
- --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \
- --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097
- # via twine
-platformdirs==4.2.2 \
- --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \
- --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3
- # via virtualenv
-proto-plus==1.24.0 \
- --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \
- --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12
- # via google-api-core
-protobuf==5.27.2 \
- --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \
- --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \
- --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \
- --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \
- --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \
- --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \
- --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \
- --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \
- --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \
- --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \
- --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714
- # via
- # gcp-docuploader
- # gcp-releasetool
- # google-api-core
- # googleapis-common-protos
- # proto-plus
-pyasn1==0.6.0 \
- --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \
- --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473
- # via
- # pyasn1-modules
- # rsa
-pyasn1-modules==0.4.0 \
- --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \
- --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b
- # via google-auth
-pycparser==2.22 \
- --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \
- --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
- # via cffi
-pygments==2.18.0 \
- --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \
- --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a
- # via
- # readme-renderer
- # rich
-pyjwt==2.8.0 \
- --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \
- --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320
- # via gcp-releasetool
-pyperclip==1.9.0 \
- --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310
- # via gcp-releasetool
-python-dateutil==2.9.0.post0 \
- --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
- --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
- # via gcp-releasetool
-readme-renderer==44.0 \
- --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \
- --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1
- # via twine
-requests==2.32.3 \
- --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \
- --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6
- # via
- # gcp-releasetool
- # google-api-core
- # google-cloud-storage
- # requests-toolbelt
- # twine
-requests-toolbelt==1.0.0 \
- --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \
- --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06
- # via twine
-rfc3986==2.0.0 \
- --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \
- --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c
- # via twine
-rich==13.7.1 \
- --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \
- --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432
- # via twine
-rsa==4.9 \
- --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
- --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
- # via google-auth
-secretstorage==3.3.3 \
- --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \
- --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99
- # via keyring
-six==1.16.0 \
- --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
- --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
- # via
- # gcp-docuploader
- # python-dateutil
-tomli==2.0.1 \
- --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
- --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
- # via nox
-twine==5.1.1 \
- --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \
- --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db
- # via -r requirements.in
-typing-extensions==4.12.2 \
- --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
- --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
- # via -r requirements.in
-urllib3==2.2.2 \
- --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \
- --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168
- # via
- # requests
- # twine
-virtualenv==20.26.3 \
- --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \
- --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589
- # via nox
-wheel==0.43.0 \
- --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \
- --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81
- # via -r requirements.in
-zipp==3.19.2 \
- --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \
- --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c
- # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==70.2.0 \
- --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \
- --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1
- # via -r requirements.in
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg
new file mode 100644
index 00000000..2a4199f4
--- /dev/null
+++ b/.kokoro/samples/python3.13/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.13"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-313"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
new file mode 100644
index 00000000..a18c0cfc
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.13/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 55910c8b..53e365bc 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1
env | grep KOKORO
# Install nox
-python3.9 -m pip install --upgrade --quiet nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
# Use secrets acessor service account to get secrets
if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a1552b53..aab5d53f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,88 @@
[1]: https://pypi.org/project/google-api-core/#history
+## [2.25.1](https://github.com/googleapis/python-api-core/compare/v2.25.0...v2.25.1) (2025-06-02)
+
+
+### Bug Fixes
+
+* Allow BackgroundConsumer To Inform Caller of Fatal Exceptions with Optional Callback ([3206c01](https://github.com/googleapis/python-api-core/commit/3206c0170dda80a613bf257ebcf3b78c1a20465f))
+
+## [2.25.0](https://github.com/googleapis/python-api-core/compare/v2.24.2...v2.25.0) (2025-05-06)
+
+
+### Features
+
+* Add protobuf runtime version to `x-goog-api-client` header ([#812](https://github.com/googleapis/python-api-core/issues/812)) ([118bd96](https://github.com/googleapis/python-api-core/commit/118bd96f3907234351972409834ab5309cdfcee4))
+* Support dynamic retry backoff values ([#793](https://github.com/googleapis/python-api-core/issues/793)) ([70697a3](https://github.com/googleapis/python-api-core/commit/70697a3e39c389768e724fddacb3c9b97d609384))
+
+
+### Bug Fixes
+
+* Resolve issue where pre-release versions of dependencies are installed ([#808](https://github.com/googleapis/python-api-core/issues/808)) ([1ca7973](https://github.com/googleapis/python-api-core/commit/1ca7973a395099403be1a99c7c4583a8f22d5d8e))
+
+## [2.24.2](https://github.com/googleapis/python-api-core/compare/v2.24.1...v2.24.2) (2025-03-06)
+
+
+### Bug Fixes
+
+* **deps:** Allow protobuf 6.x ([#804](https://github.com/googleapis/python-api-core/issues/804)) ([687be7c](https://github.com/googleapis/python-api-core/commit/687be7cbf629a61feb43ef37d3d920fa32b2d636))
+
+## [2.24.1](https://github.com/googleapis/python-api-core/compare/v2.24.0...v2.24.1) (2025-01-24)
+
+
+### Bug Fixes
+
+* Memory leak in bidi classes ([#770](https://github.com/googleapis/python-api-core/issues/770)) ([c1b8afa](https://github.com/googleapis/python-api-core/commit/c1b8afa4e2abe256e70651defccdc285f104ed19))
+* Resolve the issue where rpc timeout of 0 is used when timeout expires ([#776](https://github.com/googleapis/python-api-core/issues/776)) ([a5604a5](https://github.com/googleapis/python-api-core/commit/a5604a55070c6d92618d078191bf99f4c168d5f6))
+
+
+### Documentation
+
+* Add warnings regarding consuming externally sourced credentials ([#783](https://github.com/googleapis/python-api-core/issues/783)) ([0ec1825](https://github.com/googleapis/python-api-core/commit/0ec18254b90721684679a98bcacef4615467a227))
+
+## [2.24.0](https://github.com/googleapis/python-api-core/compare/v2.23.0...v2.24.0) (2024-12-06)
+
+
+### Features
+
+* Add automatic logging config to support debug logging ([#754](https://github.com/googleapis/python-api-core/issues/754)) ([d18d9b5](https://github.com/googleapis/python-api-core/commit/d18d9b5131162b44eebcc0859a7aca1198a2ac06))
+* Update recognized logging fields ([#766](https://github.com/googleapis/python-api-core/issues/766)) ([5f80f77](https://github.com/googleapis/python-api-core/commit/5f80f778bc25d878b3187c6138077ad8c6bcd35f))
+
+## [2.23.0](https://github.com/googleapis/python-api-core/compare/v2.22.0...v2.23.0) (2024-11-11)
+
+
+### Features
+
+* Migrate to pyproject.toml ([#736](https://github.com/googleapis/python-api-core/issues/736)) ([159e9a4](https://github.com/googleapis/python-api-core/commit/159e9a49525937f18a55c38136aae32575424d55))
+
+## [2.22.0](https://github.com/googleapis/python-api-core/compare/v2.21.0...v2.22.0) (2024-10-25)
+
+
+### Features
+
+* Add support for python 3.13 ([#696](https://github.com/googleapis/python-api-core/issues/696)) ([46b3d3a](https://github.com/googleapis/python-api-core/commit/46b3d3abaa1bae28e9d788d7c3006224cd6f74d5))
+
+
+### Bug Fixes
+
+* Add type hints to ClientOptions ([#735](https://github.com/googleapis/python-api-core/issues/735)) ([b91ed19](https://github.com/googleapis/python-api-core/commit/b91ed19210148dfa49ec790c4dd5f4a7bff80954))
+* Improve `Any` decode error ([#712](https://github.com/googleapis/python-api-core/issues/712)) ([0d5ed37](https://github.com/googleapis/python-api-core/commit/0d5ed37c96f9b40bccae98e228163a88abeb1763))
+* Require proto-plus >= 1.25.0 for Python 3.13 ([#740](https://github.com/googleapis/python-api-core/issues/740)) ([a26313e](https://github.com/googleapis/python-api-core/commit/a26313e1cb12e44aa498f12622edccc0c83ba0c3))
+* Switch to unittest.mock from mock ([#713](https://github.com/googleapis/python-api-core/issues/713)) ([8c53381](https://github.com/googleapis/python-api-core/commit/8c533819b7e212aa2f1d695a7ce08629f4fb2daf))
+
+## [2.21.0](https://github.com/googleapis/python-api-core/compare/v2.20.0...v2.21.0) (2024-10-07)
+
+
+### Features
+
+* Add support for asynchronous long running operations ([#724](https://github.com/googleapis/python-api-core/issues/724)) ([aaed69b](https://github.com/googleapis/python-api-core/commit/aaed69b6f1d694cd7e561e2aa03fdd8d6cfb369a))
+
+
+### Bug Fixes
+
+* Set chunk size for async stream content ([#702](https://github.com/googleapis/python-api-core/issues/702)) ([45b8a6d](https://github.com/googleapis/python-api-core/commit/45b8a6db5a5c75acdd8be896d0152f11608c7e51))
+
## [2.20.0](https://github.com/googleapis/python-api-core/compare/v2.19.2...v2.20.0) (2024-09-18)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 8d1475ce..1a1f608b 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -21,7 +21,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows.
+ 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -71,7 +71,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.12 -- -k
+ $ nox -s unit-3.13 -- -k
.. note::
@@ -203,6 +203,7 @@ We support:
- `Python 3.10`_
- `Python 3.11`_
- `Python 3.12`_
+- `Python 3.13`_
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
@@ -210,6 +211,7 @@ We support:
.. _Python 3.10: https://docs.python.org/3.10/
.. _Python 3.11: https://docs.python.org/3.11/
.. _Python 3.12: https://docs.python.org/3.12/
+.. _Python 3.13: https://docs.python.org/3.13/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py
index 78d98b98..bed4c70e 100644
--- a/google/api_core/bidi.py
+++ b/google/api_core/bidi.py
@@ -306,6 +306,8 @@ def close(self):
self._request_queue.put(None)
self.call.cancel()
self._request_generator = None
+ self._initial_request = None
+ self._callbacks = []
# Don't set self.call to None. Keep it around so that send/recv can
# raise the error.
@@ -622,12 +624,15 @@ def on_response(response):
``open()``ed yet.
on_response (Callable[[protobuf.Message], None]): The callback to
be called for every response on the stream.
+ on_fatal_exception (Callable[[Exception], None]): The callback to
+ be called on fatal errors during consumption. Default None.
"""
- def __init__(self, bidi_rpc, on_response):
+ def __init__(self, bidi_rpc, on_response, on_fatal_exception=None):
self._bidi_rpc = bidi_rpc
self._on_response = on_response
self._paused = False
+ self._on_fatal_exception = on_fatal_exception
self._wake = threading.Condition()
self._thread = None
self._operational_lock = threading.Lock()
@@ -662,7 +667,8 @@ def _thread_main(self, ready):
_LOGGER.debug("waiting for recv.")
response = self._bidi_rpc.recv()
_LOGGER.debug("recved response.")
- self._on_response(response)
+ if self._on_response is not None:
+ self._on_response(response)
except exceptions.GoogleAPICallError as exc:
_LOGGER.debug(
@@ -673,6 +679,8 @@ def _thread_main(self, ready):
exc,
exc_info=True,
)
+ if self._on_fatal_exception is not None:
+ self._on_fatal_exception(exc)
except Exception as exc:
_LOGGER.exception(
@@ -680,6 +688,8 @@ def _thread_main(self, ready):
_BIDIRECTIONAL_CONSUMER_NAME,
exc,
)
+ if self._on_fatal_exception is not None:
+ self._on_fatal_exception(exc)
_LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME)
@@ -691,8 +701,8 @@ def start(self):
name=_BIDIRECTIONAL_CONSUMER_NAME,
target=self._thread_main,
args=(ready,),
+ daemon=True,
)
- thread.daemon = True
thread.start()
# Other parts of the code rely on `thread.is_alive` which
# isn't sufficient to know if a thread is active, just that it may
@@ -703,7 +713,11 @@ def start(self):
_LOGGER.debug("Started helper thread %s", thread.name)
def stop(self):
- """Stop consuming the stream and shutdown the background thread."""
+ """Stop consuming the stream and shutdown the background thread.
+
+ NOTE: Cannot be called within `_thread_main`, since it is not
+ possible to join a thread to itself.
+ """
with self._operational_lock:
self._bidi_rpc.close()
@@ -717,6 +731,8 @@ def stop(self):
_LOGGER.warning("Background thread did not exit.")
self._thread = None
+ self._on_response = None
+ self._on_fatal_exception = None
@property
def is_active(self):
diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py
index 48326799..f0678d24 100644
--- a/google/api_core/client_info.py
+++ b/google/api_core/client_info.py
@@ -57,7 +57,9 @@ class ClientInfo(object):
user_agent (Optional[str]): Prefix to the user agent header. This is
used to supply information such as application name or partner tool.
Recommended format: ``application-or-tool-ID/major.minor.version``.
- rest_version (Optional[str]): The requests library version.
+ rest_version (Optional[str]): A string with labeled versions of the
+ dependencies used for REST transport.
+ protobuf_runtime_version (Optional[str]): The protobuf runtime version.
"""
def __init__(
@@ -69,6 +71,7 @@ def __init__(
client_library_version=None,
user_agent=None,
rest_version=None,
+ protobuf_runtime_version=None,
):
self.python_version = python_version
self.grpc_version = grpc_version
@@ -77,6 +80,7 @@ def __init__(
self.client_library_version = client_library_version
self.user_agent = user_agent
self.rest_version = rest_version
+ self.protobuf_runtime_version = protobuf_runtime_version
def to_user_agent(self):
"""Returns the user-agent string for this client info."""
@@ -104,4 +108,7 @@ def to_user_agent(self):
if self.client_library_version is not None:
ua += "gccl/{client_library_version} "
+ if self.protobuf_runtime_version is not None:
+ ua += "pb/{protobuf_runtime_version} "
+
return ua.format(**self.__dict__).strip()
diff --git a/google/api_core/client_logging.py b/google/api_core/client_logging.py
new file mode 100644
index 00000000..837e3e0c
--- /dev/null
+++ b/google/api_core/client_logging.py
@@ -0,0 +1,144 @@
+import logging
+import json
+import os
+
+from typing import List, Optional
+
+_LOGGING_INITIALIZED = False
+_BASE_LOGGER_NAME = "google"
+
+# Fields to be included in the StructuredLogFormatter.
+#
+# TODO(https://github.com/googleapis/python-api-core/issues/761): Update this list to support additional logging fields.
+_recognized_logging_fields = [
+ "httpRequest",
+ "rpcName",
+ "serviceName",
+ "credentialsType",
+ "credentialsInfo",
+ "universeDomain",
+ "request",
+ "response",
+ "metadata",
+ "retryAttempt",
+ "httpResponse",
+] # Additional fields to be Logged.
+
+
+def logger_configured(logger) -> bool:
+ """Determines whether `logger` has non-default configuration
+
+ Args:
+ logger: The logger to check.
+
+ Returns:
+ bool: Whether the logger has any non-default configuration.
+ """
+ return (
+ logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate
+ )
+
+
+def initialize_logging():
+ """Initializes "google" loggers, partly based on the environment variable
+
+ Initializes the "google" logger and any loggers (at the "google"
+ level or lower) specified by the environment variable
+ GOOGLE_SDK_PYTHON_LOGGING_SCOPE, as long as none of these loggers
+ were previously configured. If any such loggers (including the
+ "google" logger) are initialized, they are set to NOT propagate
+ log events up to their parent loggers.
+
+ This initialization is executed only once, and hence the
+ environment variable is only processed the first time this
+ function is called.
+ """
+ global _LOGGING_INITIALIZED
+ if _LOGGING_INITIALIZED:
+ return
+ scopes = os.getenv("GOOGLE_SDK_PYTHON_LOGGING_SCOPE", "")
+ setup_logging(scopes)
+ _LOGGING_INITIALIZED = True
+
+
+def parse_logging_scopes(scopes: Optional[str] = None) -> List[str]:
+ """Returns a list of logger names.
+
+ Splits the single string of comma-separated logger names into a list of individual logger name strings.
+
+ Args:
+ scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
+
+ Returns:
+ A list of all the logger names in scopes.
+ """
+ if not scopes:
+ return []
+ # TODO(https://github.com/googleapis/python-api-core/issues/759): check if the namespace is a valid namespace.
+ # TODO(b/380481951): Support logging multiple scopes.
+ # TODO(b/380483756): Raise or log a warning for an invalid scope.
+ namespaces = [scopes]
+ return namespaces
+
+
+def configure_defaults(logger):
+ """Configures `logger` to emit structured info to stdout."""
+ if not logger_configured(logger):
+ console_handler = logging.StreamHandler()
+ logger.setLevel("DEBUG")
+ logger.propagate = False
+ formatter = StructuredLogFormatter()
+ console_handler.setFormatter(formatter)
+ logger.addHandler(console_handler)
+
+
+def setup_logging(scopes: str = ""):
+ """Sets up logging for the specified `scopes`.
+
+ If the loggers specified in `scopes` have not been previously
+ configured, this will configure them to emit structured log
+ entries to stdout, and to not propagate their log events to their
+ parent loggers. Additionally, if the "google" logger (whether it
+ was specified in `scopes` or not) was not previously configured,
+ it will also configure it to not propagate log events to the root
+ logger.
+
+ Args:
+ scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.)
+
+ """
+
+ # only returns valid logger scopes (namespaces)
+ # this list has at most one element.
+ logger_names = parse_logging_scopes(scopes)
+
+ for namespace in logger_names:
+ # This will either create a module level logger or get the reference of the base logger instantiated above.
+ logger = logging.getLogger(namespace)
+
+ # Configure default settings.
+ configure_defaults(logger)
+
+ # disable log propagation at base logger level to the root logger only if a base logger is not already configured via code changes.
+ base_logger = logging.getLogger(_BASE_LOGGER_NAME)
+ if not logger_configured(base_logger):
+ base_logger.propagate = False
+
+
+# TODO(https://github.com/googleapis/python-api-core/issues/763): Expand documentation.
+class StructuredLogFormatter(logging.Formatter):
+ # TODO(https://github.com/googleapis/python-api-core/issues/761): ensure that additional fields such as
+ # function name, file name, and line no. appear in a log output.
+ def format(self, record: logging.LogRecord):
+ log_obj = {
+ "timestamp": self.formatTime(record),
+ "severity": record.levelname,
+ "name": record.name,
+ "message": record.getMessage(),
+ }
+
+ for field_name in _recognized_logging_fields:
+ value = getattr(record, field_name, None)
+ if value is not None:
+ log_obj[field_name] = value
+ return json.dumps(log_obj)
diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py
index e93f9586..d11665d2 100644
--- a/google/api_core/client_options.py
+++ b/google/api_core/client_options.py
@@ -48,6 +48,8 @@ def get_client_cert():
"""
+from typing import Callable, Mapping, Optional, Sequence, Tuple
+
class ClientOptions(object):
"""Client Options used to set options on clients.
@@ -55,11 +57,11 @@ class ClientOptions(object):
Args:
api_endpoint (Optional[str]): The desired API endpoint, e.g.,
compute.googleapis.com
- client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback
which returns client certificate bytes and private key bytes both in
PEM format. ``client_cert_source`` and ``client_encrypted_cert_source``
are mutually exclusive.
- client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]):
+ client_encrypted_cert_source (Optional[Callable[[], Tuple[str, str, bytes]]]):
A callback which returns client certificate file path, encrypted
private key file path, and the passphrase bytes.``client_cert_source``
and ``client_encrypted_cert_source`` are mutually exclusive.
@@ -67,6 +69,18 @@ class ClientOptions(object):
quota belongs to.
credentials_file (Optional[str]): A path to a file storing credentials.
``credentials_file` and ``api_key`` are mutually exclusive.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
scopes (Optional[Sequence[str]]): OAuth access token override scopes.
api_key (Optional[str]): Google API key. ``credentials_file`` and
``api_key`` are mutually exclusive.
@@ -88,15 +102,17 @@ class ClientOptions(object):
def __init__(
self,
- api_endpoint=None,
- client_cert_source=None,
- client_encrypted_cert_source=None,
- quota_project_id=None,
- credentials_file=None,
- scopes=None,
- api_key=None,
- api_audience=None,
- universe_domain=None,
+ api_endpoint: Optional[str] = None,
+ client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ client_encrypted_cert_source: Optional[
+ Callable[[], Tuple[str, str, bytes]]
+ ] = None,
+ quota_project_id: Optional[str] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ api_key: Optional[str] = None,
+ api_audience: Optional[str] = None,
+ universe_domain: Optional[str] = None,
):
if client_cert_source and client_encrypted_cert_source:
raise ValueError(
@@ -114,11 +130,11 @@ def __init__(
self.api_audience = api_audience
self.universe_domain = universe_domain
- def __repr__(self):
+ def __repr__(self) -> str:
return "ClientOptions: " + repr(self.__dict__)
-def from_dict(options):
+def from_dict(options: Mapping[str, object]) -> ClientOptions:
"""Construct a client options object from a mapping object.
Args:
diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py
index 5b25d124..e3eb696c 100644
--- a/google/api_core/exceptions.py
+++ b/google/api_core/exceptions.py
@@ -517,14 +517,14 @@ def format_http_response_error(
errors = payload.get("error", {}).get("errors", ())
# In JSON, details are already formatted in developer-friendly way.
details = payload.get("error", {}).get("details", ())
- error_info = list(
+ error_info_list = list(
filter(
lambda detail: detail.get("@type", "")
== "type.googleapis.com/google.rpc.ErrorInfo",
details,
)
)
- error_info = error_info[0] if error_info else None
+ error_info = error_info_list[0] if error_info_list else None
message = _format_rest_error_message(error_message, method, url)
exception = from_http_status(
diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py
index 2de1be7f..4b3b5649 100644
--- a/google/api_core/gapic_v1/client_info.py
+++ b/google/api_core/gapic_v1/client_info.py
@@ -45,6 +45,9 @@ class ClientInfo(client_info.ClientInfo):
user_agent (Optional[str]): Prefix to the user agent header. This is
used to supply information such as application name or partner tool.
Recommended format: ``application-or-tool-ID/major.minor.version``.
+ rest_version (Optional[str]): A string with labeled versions of the
+ dependencies used for REST transport.
+ protobuf_runtime_version (Optional[str]): The protobuf runtime version.
"""
def to_grpc_metadata(self):
diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py
index 1dcbb8b9..07963024 100644
--- a/google/api_core/grpc_helpers.py
+++ b/google/api_core/grpc_helpers.py
@@ -216,6 +216,18 @@ def _create_composite_credentials(
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
default_scopes (Sequence[str]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -316,6 +328,18 @@ def create_channel(
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
quota_project_id (str): An optional project to use for billing and quota.
default_scopes (Sequence[str]): Default scopes passed by a Google client
library. Use 'scopes' for user-defined scopes.
diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py
index 6feb2229..af661430 100644
--- a/google/api_core/grpc_helpers_async.py
+++ b/google/api_core/grpc_helpers_async.py
@@ -152,9 +152,9 @@ class _WrappedStreamStreamCall(
# public type alias denoting the return type of async streaming gapic calls
-GrpcAsyncStream = _WrappedStreamResponseMixin[P]
+GrpcAsyncStream = _WrappedStreamResponseMixin
# public type alias denoting the return type of unary gapic calls
-AwaitableGrpcCall = _WrappedUnaryResponseMixin[P]
+AwaitableGrpcCall = _WrappedUnaryResponseMixin
def _wrap_unary_errors(callable_):
@@ -236,6 +236,18 @@ def create_channel(
credentials_file (str): A file with credentials that can be loaded with
:func:`google.auth.load_credentials_from_file`. This argument is
mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
quota_project_id (str): An optional project to use for billing and quota.
default_scopes (Sequence[str]): Default scopes passed by a Google client
library. Use 'scopes' for user-defined scopes.
diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py
index 8b75426b..4db32a4c 100644
--- a/google/api_core/operations_v1/__init__.py
+++ b/google/api_core/operations_v1/__init__.py
@@ -14,9 +14,7 @@
"""Package for interacting with the google.longrunning.operations meta-API."""
-from google.api_core.operations_v1.abstract_operations_client import (
- AbstractOperationsClient,
-)
+from google.api_core.operations_v1.abstract_operations_client import AbstractOperationsClient
from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient
from google.api_core.operations_v1.operations_client import OperationsClient
from google.api_core.operations_v1.transports.rest import OperationsRestTransport
@@ -25,5 +23,18 @@
"AbstractOperationsClient",
"OperationsAsyncClient",
"OperationsClient",
- "OperationsRestTransport",
+ "OperationsRestTransport"
]
+
+try:
+ from google.api_core.operations_v1.transports.rest_asyncio import (
+ AsyncOperationsRestTransport,
+ )
+ from google.api_core.operations_v1.operations_rest_client_async import AsyncOperationsRestClient
+
+ __all__ += ["AsyncOperationsRestClient", "AsyncOperationsRestTransport"]
+except ImportError:
+ # This import requires the `async_rest` extra.
+ # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
+ # as other transports are still available.
+ pass
diff --git a/google/api_core/operations_v1/abstract_operations_base_client.py b/google/api_core/operations_v1/abstract_operations_base_client.py
new file mode 100644
index 00000000..160c2a88
--- /dev/null
+++ b/google/api_core/operations_v1/abstract_operations_base_client.py
@@ -0,0 +1,370 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Optional, Type, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+
+try:
+ from google.api_core.operations_v1.transports.rest_asyncio import (
+ AsyncOperationsRestTransport,
+ )
+
+ HAS_ASYNC_REST_DEPENDENCIES = True
+except ImportError as e:
+ HAS_ASYNC_REST_DEPENDENCIES = False
+ ASYNC_REST_EXCEPTION = e
+
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.auth.transport import mtls # type: ignore
+
+
+class AbstractOperationsBaseClientMeta(type):
+ """Metaclass for the Operations Base client.
+
+ This provides base class-level methods for building and retrieving
+ support objects (e.g. transport) without polluting the client instance
+ objects.
+ """
+
+ _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
+ _transport_registry["rest"] = OperationsRestTransport
+ if HAS_ASYNC_REST_DEPENDENCIES:
+ _transport_registry["rest_asyncio"] = AsyncOperationsRestTransport
+
+ def get_transport_class(
+ cls,
+ label: Optional[str] = None,
+ ) -> Type[OperationsTransport]:
+ """Returns an appropriate transport class.
+
+ Args:
+ label: The name of the desired transport. If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if (
+ label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES
+ ): # pragma: NO COVER
+ raise ASYNC_REST_EXCEPTION
+
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class AbstractOperationsBaseClient(metaclass=AbstractOperationsBaseClientMeta):
+ """Manages long-running operations with an API service.
+
+ When an API method normally takes long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "longrunning.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """
+ This class method should be overridden by the subclasses.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Raises:
+ NotImplementedError: If the method is called on the base class.
+ """
+ raise NotImplementedError("`from_service_account_info` is not implemented.")
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """
+ This class method should be overridden by the subclasses.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Raises:
+ NotImplementedError: If the method is called on the base class.
+ """
+ raise NotImplementedError("`from_service_account_file` is not implemented.")
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> OperationsTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ OperationsTransport: The transport used by the client
+ instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(
+ billing_account: str,
+ ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(
+ folder: str,
+ ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(
+ folder=folder,
+ )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(
+ organization: str,
+ ) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(
+ organization=organization,
+ )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(
+ project: str,
+ ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(
+ project=project,
+ )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(
+ project: str,
+ location: str,
+ ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project,
+ location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ if isinstance(client_options, dict):
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = os.getenv(
+ "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+ ).lower()
+ if use_client_cert not in ("true", "false"):
+ raise ValueError(
+ "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+ )
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert == "true":
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ if is_mtls:
+ client_cert_source_func = mtls.default_client_cert_source()
+ else:
+ client_cert_source_func = None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ if is_mtls:
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ else:
+ api_endpoint = self.DEFAULT_ENDPOINT
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
+ "values: never, auto, always"
+ )
+
+ # Save or instantiate the transport.
+ # Ordinarily, we provide the transport, but allowing a custom transport
+ # instance provides an extensibility point for unusual situations.
+ if isinstance(transport, OperationsTransport):
+ # transport is a OperationsTransport instance.
+ if credentials or client_options.credentials_file:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its credentials directly."
+ )
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, provide its scopes "
+ "directly."
+ )
+ self._transport = transport
+ else:
+ Transport = type(self).get_transport_class(transport)
+ self._transport = Transport(
+ credentials=credentials,
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=True,
+ )
diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py
index 38f532af..fc445362 100644
--- a/google/api_core/operations_v1/abstract_operations_client.py
+++ b/google/api_core/operations_v1/abstract_operations_client.py
@@ -13,10 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-from collections import OrderedDict
-import os
-import re
-from typing import Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Optional, Sequence, Tuple, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import gapic_v1 # type: ignore
@@ -26,10 +23,10 @@
DEFAULT_CLIENT_INFO,
OperationsTransport,
)
-from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+from google.api_core.operations_v1.abstract_operations_base_client import (
+ AbstractOperationsBaseClient,
+)
from google.auth import credentials as ga_credentials # type: ignore
-from google.auth.exceptions import MutualTLSChannelError # type: ignore
-from google.auth.transport import mtls # type: ignore
from google.longrunning import operations_pb2
from google.oauth2 import service_account # type: ignore
import grpc
@@ -37,40 +34,7 @@
OptionalRetry = Union[retries.Retry, object]
-class AbstractOperationsClientMeta(type):
- """Metaclass for the Operations client.
-
- This provides class-level methods for building and retrieving
- support objects (e.g. transport) without polluting the client instance
- objects.
- """
-
- _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]]
- _transport_registry["rest"] = OperationsRestTransport
-
- def get_transport_class(
- cls,
- label: Optional[str] = None,
- ) -> Type[OperationsTransport]:
- """Returns an appropriate transport class.
-
- Args:
- label: The name of the desired transport. If none is
- provided, then the first transport in the registry is used.
-
- Returns:
- The transport class to use.
- """
- # If a specific transport is requested, return that one.
- if label:
- return cls._transport_registry[label]
-
- # No transport is requested; return the default (that is, the first one
- # in the dictionary).
- return next(iter(cls._transport_registry.values()))
-
-
-class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta):
+class AbstractOperationsClient(AbstractOperationsBaseClient):
"""Manages long-running operations with an API service.
When an API method normally takes long time to complete, it can be
@@ -83,165 +47,6 @@ class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta):
interface so developers can have a consistent client experience.
"""
- @staticmethod
- def _get_default_mtls_endpoint(api_endpoint):
- """Converts api endpoint to mTLS endpoint.
-
- Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
- "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
- Args:
- api_endpoint (Optional[str]): the api endpoint to convert.
- Returns:
- str: converted mTLS api endpoint.
- """
- if not api_endpoint:
- return api_endpoint
-
- mtls_endpoint_re = re.compile(
- r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- )
-
- m = mtls_endpoint_re.match(api_endpoint)
- name, mtls, sandbox, googledomain = m.groups()
- if mtls or not googledomain:
- return api_endpoint
-
- if sandbox:
- return api_endpoint.replace(
- "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
- )
-
- return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
- DEFAULT_ENDPOINT = "longrunning.googleapis.com"
- DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
- DEFAULT_ENDPOINT
- )
-
- @classmethod
- def from_service_account_info(cls, info: dict, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- info.
-
- Args:
- info (dict): The service account private key info.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- AbstractOperationsClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_info(info)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- @classmethod
- def from_service_account_file(cls, filename: str, *args, **kwargs):
- """Creates an instance of this client using the provided credentials
- file.
-
- Args:
- filename (str): The path to the service account private key json
- file.
- args: Additional arguments to pass to the constructor.
- kwargs: Additional arguments to pass to the constructor.
-
- Returns:
- AbstractOperationsClient: The constructed client.
- """
- credentials = service_account.Credentials.from_service_account_file(filename)
- kwargs["credentials"] = credentials
- return cls(*args, **kwargs)
-
- from_service_account_json = from_service_account_file
-
- @property
- def transport(self) -> OperationsTransport:
- """Returns the transport used by the client instance.
-
- Returns:
- OperationsTransport: The transport used by the client
- instance.
- """
- return self._transport
-
- @staticmethod
- def common_billing_account_path(
- billing_account: str,
- ) -> str:
- """Returns a fully-qualified billing_account string."""
- return "billingAccounts/{billing_account}".format(
- billing_account=billing_account,
- )
-
- @staticmethod
- def parse_common_billing_account_path(path: str) -> Dict[str, str]:
- """Parse a billing_account path into its component segments."""
- m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_folder_path(
- folder: str,
- ) -> str:
- """Returns a fully-qualified folder string."""
- return "folders/{folder}".format(
- folder=folder,
- )
-
- @staticmethod
- def parse_common_folder_path(path: str) -> Dict[str, str]:
- """Parse a folder path into its component segments."""
- m = re.match(r"^folders/(?P<folder>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_organization_path(
- organization: str,
- ) -> str:
- """Returns a fully-qualified organization string."""
- return "organizations/{organization}".format(
- organization=organization,
- )
-
- @staticmethod
- def parse_common_organization_path(path: str) -> Dict[str, str]:
- """Parse a organization path into its component segments."""
- m = re.match(r"^organizations/(?P<organization>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_project_path(
- project: str,
- ) -> str:
- """Returns a fully-qualified project string."""
- return "projects/{project}".format(
- project=project,
- )
-
- @staticmethod
- def parse_common_project_path(path: str) -> Dict[str, str]:
- """Parse a project path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)$", path)
- return m.groupdict() if m else {}
-
- @staticmethod
- def common_location_path(
- project: str,
- location: str,
- ) -> str:
- """Returns a fully-qualified location string."""
- return "projects/{project}/locations/{location}".format(
- project=project,
- location=location,
- )
-
- @staticmethod
- def parse_common_location_path(path: str) -> Dict[str, str]:
- """Parse a location path into its component segments."""
- m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
- return m.groupdict() if m else {}
-
def __init__(
self,
*,
@@ -287,80 +92,49 @@ def __init__(
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
- if isinstance(client_options, dict):
- client_options = client_options_lib.from_dict(client_options)
- if client_options is None:
- client_options = client_options_lib.ClientOptions()
-
- # Create SSL credentials for mutual TLS if needed.
- use_client_cert = os.getenv(
- "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
- ).lower()
- if use_client_cert not in ("true", "false"):
- raise ValueError(
- "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
- )
- client_cert_source_func = None
- is_mtls = False
- if use_client_cert == "true":
- if client_options.client_cert_source:
- is_mtls = True
- client_cert_source_func = client_options.client_cert_source
- else:
- is_mtls = mtls.has_default_client_cert_source()
- if is_mtls:
- client_cert_source_func = mtls.default_client_cert_source()
- else:
- client_cert_source_func = None
-
- # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None:
- api_endpoint = client_options.api_endpoint
- else:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
- if use_mtls_env == "never":
- api_endpoint = self.DEFAULT_ENDPOINT
- elif use_mtls_env == "always":
- api_endpoint = self.DEFAULT_MTLS_ENDPOINT
- elif use_mtls_env == "auto":
- if is_mtls:
- api_endpoint = self.DEFAULT_MTLS_ENDPOINT
- else:
- api_endpoint = self.DEFAULT_ENDPOINT
- else:
- raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
- "values: never, auto, always"
- )
-
- # Save or instantiate the transport.
- # Ordinarily, we provide the transport, but allowing a custom transport
- # instance provides an extensibility point for unusual situations.
- if isinstance(transport, OperationsTransport):
- # transport is a OperationsTransport instance.
- if credentials or client_options.credentials_file:
- raise ValueError(
- "When providing a transport instance, "
- "provide its credentials directly."
- )
- if client_options.scopes:
- raise ValueError(
- "When providing a transport instance, provide its scopes "
- "directly."
- )
- self._transport = transport
- else:
- Transport = type(self).get_transport_class(transport)
- self._transport = Transport(
- credentials=credentials,
- credentials_file=client_options.credentials_file,
- host=api_endpoint,
- scopes=client_options.scopes,
- client_cert_source_for_mtls=client_cert_source_func,
- quota_project_id=client_options.quota_project_id,
- client_info=client_info,
- always_use_jwt_access=True,
- )
+ super().__init__(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AbstractOperationsClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
def list_operations(
self,
diff --git a/google/api_core/operations_v1/operations_rest_client_async.py b/google/api_core/operations_v1/operations_rest_client_async.py
new file mode 100644
index 00000000..7ab0cd36
--- /dev/null
+++ b/google/api_core/operations_v1/operations_rest_client_async.py
@@ -0,0 +1,345 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import Optional, Sequence, Tuple, Union
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core.operations_v1 import pagers_async as pagers
+from google.api_core.operations_v1.transports.base import (
+ DEFAULT_CLIENT_INFO,
+ OperationsTransport,
+)
+from google.api_core.operations_v1.abstract_operations_base_client import (
+ AbstractOperationsBaseClient,
+)
+from google.longrunning import operations_pb2
+
+try:
+ from google.auth.aio import credentials as ga_credentials # type: ignore
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+
+class AsyncOperationsRestClient(AbstractOperationsBaseClient):
+ """Manages long-running operations with a REST API service for the asynchronous client.
+
+ When an API method normally takes long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+ """
+
+ def __init__(
+ self,
+ *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, OperationsTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the operations client.
+
+ Args:
+ credentials (Optional[google.auth.aio.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, OperationsTransport]): The
+ transport to use. If set to None, this defaults to 'rest_asyncio'.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ super().__init__(
+ credentials=credentials, # type: ignore
+ # NOTE: If a transport is not provided, we force the client to use the async
+ # REST transport.
+ transport=transport or "rest_asyncio",
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def get_operation(
+ self,
+ name: str,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Gets the latest state of a long-running operation.
+ Clients can use this method to poll the operation result
+ at intervals as recommended by the API service.
+
+ Args:
+ name (str):
+ The name of the operation resource.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.longrunning.operations_pb2.Operation:
+ This resource represents a long-
+ running operation that is the result of a
+ network API call.
+
+ """
+
+ request = operations_pb2.GetOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def list_operations(
+ self,
+ name: str,
+ filter_: Optional[str] = None,
+ *,
+ page_size: Optional[int] = None,
+ page_token: Optional[str] = None,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListOperationsAsyncPager:
+ r"""Lists operations that match the specified filter in the request.
+ If the server doesn't support this method, it returns
+ ``UNIMPLEMENTED``.
+
+ NOTE: the ``name`` binding allows API services to override the
+ binding to use different resource name schemes, such as
+ ``users/*/operations``. To override the binding, API services
+ can add a binding such as ``"/v1/{name=users/*}/operations"`` to
+ their service configuration. For backwards compatibility, the
+ default name includes the operations collection id, however
+ overriding users must ensure the name binding is the parent
+ resource, without the operations collection id.
+
+ Args:
+ name (str):
+ The name of the operation's parent
+ resource.
+ filter_ (str):
+ The standard list filter.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operations_v1.pagers.ListOperationsPager:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create a protobuf request object.
+ request = operations_pb2.ListOperationsRequest(name=name, filter=filter_)
+ if page_size is not None:
+ request.page_size = page_size
+ if page_token is not None:
+ request.page_token = page_token
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_operations]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListOperationsAsyncPager(
+ method=rpc,
+ request=request,
+ response=response,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_operation(
+ self,
+ name: str,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a long-running operation. This method indicates that the
+ client is no longer interested in the operation result. It does
+ not cancel the operation. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be deleted.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.DeleteOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ async def cancel_operation(
+ self,
+ name: Optional[str] = None,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Starts asynchronous cancellation on a long-running operation.
+ The server makes a best effort to cancel the operation, but
+ success is not guaranteed. If the server doesn't support this
+ method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients
+ can use
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]
+ or other methods to check whether the cancellation succeeded or
+ whether the operation completed despite cancellation. On
+ successful cancellation, the operation is not deleted; instead,
+ it becomes an operation with an
+ [Operation.error][google.api_core.operations_v1.Operation.error] value with
+ a [google.rpc.Status.code][google.rpc.Status.code] of 1,
+ corresponding to ``Code.CANCELLED``.
+
+ Args:
+ name (str):
+ The name of the operation resource to
+ be cancelled.
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create the request object.
+ request = operations_pb2.CancelOperationRequest(name=name)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.cancel_operation]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata or ()) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py
index b8a47757..132f1c66 100644
--- a/google/api_core/operations_v1/pagers.py
+++ b/google/api_core/operations_v1/pagers.py
@@ -14,7 +14,6 @@
# limitations under the License.
#
from typing import (
- Any,
Callable,
Iterator,
Sequence,
@@ -22,9 +21,10 @@
)
from google.longrunning import operations_pb2
+from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
-class ListOperationsPager:
+class ListOperationsPager(ListOperationsPagerBase):
"""A pager for iterating through ``list_operations`` requests.
This class thinly wraps an initial
@@ -50,25 +50,9 @@ def __init__(
*,
metadata: Sequence[Tuple[str, str]] = ()
):
- """Instantiate the pager.
-
- Args:
- method (Callable): The method that was originally called, and
- which instantiated this pager.
- request (google.longrunning.operations_pb2.ListOperationsRequest):
- The initial request object.
- response (google.longrunning.operations_pb2.ListOperationsResponse):
- The initial response object.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- self._method = method
- self._request = request
- self._response = response
- self._metadata = metadata
-
- def __getattr__(self, name: str) -> Any:
- return getattr(self._response, name)
+ super().__init__(
+ method=method, request=request, response=response, metadata=metadata
+ )
@property
def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
@@ -81,6 +65,3 @@ def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]:
def __iter__(self) -> Iterator[operations_pb2.Operation]:
for page in self.pages:
yield from page.operations
-
- def __repr__(self) -> str:
- return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/api_core/operations_v1/pagers_async.py b/google/api_core/operations_v1/pagers_async.py
new file mode 100644
index 00000000..e2909dd5
--- /dev/null
+++ b/google/api_core/operations_v1/pagers_async.py
@@ -0,0 +1,71 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Callable,
+ AsyncIterator,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase
+
+
+class ListOperationsAsyncPager(ListOperationsPagerBase):
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ super().__init__(
+ method=method, request=request, response=response, metadata=metadata
+ )
+
+ @property
+ async def pages(self) -> AsyncIterator[operations_pb2.ListOperationsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]:
+ async def async_generator():
+ async for page in self.pages:
+ for operation in page.operations:
+ yield operation
+
+ return async_generator()
diff --git a/google/api_core/operations_v1/pagers_base.py b/google/api_core/operations_v1/pagers_base.py
new file mode 100644
index 00000000..24caf74f
--- /dev/null
+++ b/google/api_core/operations_v1/pagers_base.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Any,
+ Callable,
+ Sequence,
+ Tuple,
+)
+
+from google.longrunning import operations_pb2
+
+
+class ListOperationsPagerBase:
+ """A pager for iterating through ``list_operations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``operations`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListOperations`` requests and continue to iterate
+ through the ``operations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., operations_pb2.ListOperationsResponse],
+ request: operations_pb2.ListOperationsRequest,
+ response: operations_pb2.ListOperationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.longrunning.operations_pb2.ListOperationsRequest):
+ The initial request object.
+ response (google.longrunning.operations_pb2.ListOperationsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = request
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py
index df53e15e..8c24ce6e 100644
--- a/google/api_core/operations_v1/transports/__init__.py
+++ b/google/api_core/operations_v1/transports/__init__.py
@@ -14,16 +14,26 @@
# limitations under the License.
#
from collections import OrderedDict
+from typing import cast, Dict, Tuple
from .base import OperationsTransport
from .rest import OperationsRestTransport
-
# Compile a registry of transports.
-_transport_registry = OrderedDict()
-_transport_registry["rest"] = OperationsRestTransport
+_transport_registry: Dict[str, OperationsTransport] = OrderedDict()
+_transport_registry["rest"] = cast(OperationsTransport, OperationsRestTransport)
+
+__all__: Tuple[str, ...] = ("OperationsTransport", "OperationsRestTransport")
+
+try:
+ from .rest_asyncio import AsyncOperationsRestTransport
-__all__ = (
- "OperationsTransport",
- "OperationsRestTransport",
-)
+ __all__ += ("AsyncOperationsRestTransport",)
+ _transport_registry["rest_asyncio"] = cast(
+ OperationsTransport, AsyncOperationsRestTransport
+ )
+except ImportError:
+ # This import requires the `async_rest` extra.
+ # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported
+ # as other transports are still available.
+ pass
diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py
index fb1d4fc9..71764c1e 100644
--- a/google/api_core/operations_v1/transports/base.py
+++ b/google/api_core/operations_v1/transports/base.py
@@ -14,6 +14,7 @@
# limitations under the License.
#
import abc
+import re
from typing import Awaitable, Callable, Optional, Sequence, Union
import google.api_core # type: ignore
@@ -25,10 +26,13 @@
from google.auth import credentials as ga_credentials # type: ignore
from google.longrunning import operations_pb2
from google.oauth2 import service_account # type: ignore
-from google.protobuf import empty_pb2 # type: ignore
+import google.protobuf
+from google.protobuf import empty_pb2, json_format # type: ignore
from grpc import Compression
+PROTOBUF_VERSION = google.protobuf.__version__
+
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=version.__version__,
)
@@ -45,12 +49,14 @@ def __init__(
self,
*,
host: str = DEFAULT_HOST,
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): update type hint for credentials to include `google.auth.aio.Credentials`.
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
+ url_scheme="https",
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -66,6 +72,18 @@ def __init__(
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -76,10 +94,23 @@ def __init__(
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
"""
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+ if maybe_url_match is None:
+ raise ValueError(
+ f"Unexpected hostname structure: {host}"
+ ) # pragma: NO COVER
+
+ url_match_items = maybe_url_match.groupdict()
+
+ host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
- host += ":443"
+ host += ":443" # pragma: NO COVER
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
@@ -189,6 +220,37 @@ def close(self):
"""
raise NotImplementedError()
+ def _convert_protobuf_message_to_dict(
+ self, message: google.protobuf.message.Message
+ ):
+ r"""Converts protobuf message to a dictionary.
+
+ When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
+
+ Args:
+ message(google.protobuf.message.Message): The protocol buffers message
+ instance to serialize.
+
+ Returns:
+ A dict representation of the protocol buffer message.
+ """
+ # TODO(https://github.com/googleapis/python-api-core/issues/643): For backwards compatibility
+ # with protobuf 3.x 4.x, Remove once support for protobuf 3.x and 4.x is dropped.
+ if PROTOBUF_VERSION[0:2] in ["3.", "4."]:
+ result = json_format.MessageToDict(
+ message,
+ preserving_proto_field_name=True,
+ including_default_value_fields=True, # type: ignore # backward compatibility
+ )
+ else:
+ result = json_format.MessageToDict(
+ message,
+ preserving_proto_field_name=True,
+ always_print_fields_with_no_presence=True,
+ )
+
+ return result
+
@property
def list_operations(
self,
diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py
index f37bb344..0705c518 100644
--- a/google/api_core/operations_v1/transports/rest.py
+++ b/google/api_core/operations_v1/transports/rest.py
@@ -14,7 +14,6 @@
# limitations under the License.
#
-import re
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from requests import __version__ as requests_version
@@ -41,7 +40,7 @@
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
grpc_version=None,
- rest_version=requests_version,
+ rest_version=f"requests@{requests_version}",
)
@@ -95,6 +94,18 @@ def __init__(
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
+
+ .. warning::
+ Important: If you accept a credential configuration (credential JSON/File/Stream)
+ from an external source for authentication to Google Cloud Platform, you must
+ validate it before providing it to any Google API or client library. Providing an
+ unvalidated credential configuration to Google APIs or libraries can compromise
+ the security of your systems and data. For more information, refer to
+ `Validate credential configurations from external sources`_.
+
+ .. _Validate credential configurations from external sources:
+
+ https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
@@ -123,16 +134,6 @@ def __init__(
# TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
# credentials object
-        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
- if maybe_url_match is None:
- raise ValueError(
- f"Unexpected hostname structure: {host}"
- ) # pragma: NO COVER
-
- url_match_items = maybe_url_match.groupdict()
-
- host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
-
super().__init__(
host=host,
credentials=credentials,
@@ -144,6 +145,7 @@ def __init__(
)
if client_cert_source_for_mtls:
self._session.configure_mtls_channel(client_cert_source_for_mtls)
+ # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
self._prep_wrapped_messages(client_info)
self._http_options = http_options or {}
self._path_prefix = path_prefix
@@ -152,6 +154,8 @@ def _list_operations(
self,
request: operations_pb2.ListOperationsRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
@@ -206,6 +210,7 @@ def _list_operations(
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
"{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
@@ -227,6 +232,8 @@ def _get_operation(
self,
request: operations_pb2.GetOperationRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
@@ -282,6 +289,7 @@ def _get_operation(
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
"{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
@@ -303,6 +311,8 @@ def _delete_operation(
self,
request: operations_pb2.DeleteOperationRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
@@ -351,6 +361,7 @@ def _delete_operation(
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
"{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
@@ -369,6 +380,8 @@ def _cancel_operation(
self,
request: operations_pb2.CancelOperationRequest,
*,
+ # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry`
+ # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: Optional[float] = None,
compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT,
@@ -426,6 +439,7 @@ def _cancel_operation(
# Send the request
headers = dict(metadata)
headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
response = getattr(self._session, method)(
"{host}{uri}".format(host=self._host, uri=uri),
timeout=timeout,
@@ -441,38 +455,6 @@ def _cancel_operation(
return empty_pb2.Empty()
- def _convert_protobuf_message_to_dict(
- self, message: google.protobuf.message.Message
- ):
- r"""Converts protobuf message to a dictionary.
-
- When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
-
- Args:
- message(google.protobuf.message.Message): The protocol buffers message
- instance to serialize.
-
- Returns:
- A dict representation of the protocol buffer message.
- """
- # For backwards compatibility with protobuf 3.x 4.x
- # Remove once support for protobuf 3.x and 4.x is dropped
- # https://github.com/googleapis/python-api-core/issues/643
- if PROTOBUF_VERSION[0:2] in ["3.", "4."]:
- result = json_format.MessageToDict(
- message,
- preserving_proto_field_name=True,
- including_default_value_fields=True, # type: ignore # backward compatibility
- )
- else:
- result = json_format.MessageToDict(
- message,
- preserving_proto_field_name=True,
- always_print_fields_with_no_presence=True,
- )
-
- return result
-
@property
def list_operations(
self,
diff --git a/google/api_core/operations_v1/transports/rest_asyncio.py b/google/api_core/operations_v1/transports/rest_asyncio.py
new file mode 100644
index 00000000..71c20eb8
--- /dev/null
+++ b/google/api_core/operations_v1/transports/rest_asyncio.py
@@ -0,0 +1,560 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import json
+from typing import Any, Callable, Coroutine, Dict, Optional, Sequence, Tuple
+
+from google.auth import __version__ as auth_version
+
+try:
+ from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore
+except ImportError as e: # pragma: NO COVER
+ raise ImportError(
+ "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running "
+ "`pip install google-api-core[async_rest]`."
+ ) from e
+
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import path_template # type: ignore
+from google.api_core import rest_helpers # type: ignore
+from google.api_core import retry_async as retries_async # type: ignore
+from google.auth.aio import credentials as ga_credentials_async # type: ignore
+from google.longrunning import operations_pb2 # type: ignore
+from google.protobuf import empty_pb2 # type: ignore
+from google.protobuf import json_format # type: ignore
+
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+ grpc_version=None,
+ rest_version=f"google-auth@{auth_version}",
+)
+
+
+class AsyncOperationsRestTransport(OperationsTransport):
+ """Asynchronous REST backend transport for Operations.
+
+ Manages async long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+ designed to return [Operation][google.api_core.operations_v1.Operation] to the
+ client, and the client can use this interface to receive the real
+ response asynchronously by polling the operation resource, or pass
+ the operation resource to another API (such as Google Cloud Pub/Sub
+ API) to receive the response. Any API service that returns
+ long-running operations should implement the ``Operations``
+ interface so developers can have a consistent client experience.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends JSON representations of protocol buffers over HTTP/1.1
+ """
+
+ def __init__(
+ self,
+ *,
+ host: str = "longrunning.googleapis.com",
+ credentials: Optional[ga_credentials_async.Credentials] = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ url_scheme: str = "https",
+ http_options: Optional[Dict] = None,
+ path_prefix: str = "v1",
+ # TODO(https://github.com/googleapis/python-api-core/issues/715): Add docstring for `credentials_file` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/716): Add docstring for `scopes` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/717): Add docstring for `quota_project_id` to async REST transport.
+ # TODO(https://github.com/googleapis/python-api-core/issues/718): Add docstring for `client_cert_source` to async REST transport.
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.aio.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ url_scheme: the protocol scheme for the API endpoint. Normally
+ "https", but for testing or local servers,
+ "http" can be specified.
+ http_options: a dictionary of http_options for transcoding, to override
+ the defaults from operations.proto. Each method has an entry
+ with the corresponding http rules as value.
+ path_prefix: path prefix (usually represents API version). Set to
+ "v1" by default.
+
+ """
+ unsupported_params = {
+ # TODO(https://github.com/googleapis/python-api-core/issues/715): Add support for `credentials_file` to async REST transport.
+ "google.api_core.client_options.ClientOptions.credentials_file": credentials_file,
+ # TODO(https://github.com/googleapis/python-api-core/issues/716): Add support for `scopes` to async REST transport.
+ "google.api_core.client_options.ClientOptions.scopes": scopes,
+ # TODO(https://github.com/googleapis/python-api-core/issues/717): Add support for `quota_project_id` to async REST transport.
+ "google.api_core.client_options.ClientOptions.quota_project_id": quota_project_id,
+ # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport.
+ "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls,
+            # NOTE(review): removed a duplicated `client_cert_source` entry that
+            # repeated the key-value pair above verbatim (duplicate dict keys silently collapse; last one wins).
+ }
+ provided_unsupported_params = [
+ name for name, value in unsupported_params.items() if value is not None
+ ]
+ if provided_unsupported_params:
+ raise core_exceptions.AsyncRestUnsupportedParameterError(
+ f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}"
+ )
+
+ super().__init__(
+ host=host,
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
+ credentials=credentials, # type: ignore
+ client_info=client_info,
+ # TODO(https://github.com/googleapis/python-api-core/issues/725): Set always_use_jwt_access token when supported.
+ always_use_jwt_access=False,
+ )
+ # TODO(https://github.com/googleapis/python-api-core/issues/708): add support for
+ # `default_host` in AsyncAuthorizedSession for feature parity with the synchronous
+ # code.
+ # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
+ self._session = AsyncAuthorizedSession(self._credentials) # type: ignore
+ # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables.
+ self._prep_wrapped_messages(client_info)
+ self._http_options = http_options or {}
+ self._path_prefix = path_prefix
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_operations: gapic_v1.method_async.wrap_method(
+ self.list_operations,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.get_operation: gapic_v1.method_async.wrap_method(
+ self.get_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.delete_operation: gapic_v1.method_async.wrap_method(
+ self.delete_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ self.cancel_operation: gapic_v1.method_async.wrap_method(
+ self.cancel_operation,
+ default_retry=retries_async.AsyncRetry(
+ initial=0.5,
+ maximum=10.0,
+ multiplier=2.0,
+ predicate=retries_async.if_exception_type(
+ core_exceptions.ServiceUnavailable,
+ ),
+ deadline=10.0,
+ ),
+ default_timeout=10.0,
+ client_info=client_info,
+ kind="rest_asyncio",
+ ),
+ }
+
+ async def _list_operations(
+ self,
+ request: operations_pb2.ListOperationsRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.ListOperationsResponse:
+ r"""Asynchronously call the list operations method over HTTP.
+
+ Args:
+ request (~.operations_pb2.ListOperationsRequest):
+ The request object. The request message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.ListOperationsResponse:
+ The response message for
+ [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations].
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**}}/operations".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.ListOperations" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.ListOperations"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.ListOperationsRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+ content = await response.read()
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ # Return the response
+ api_response = operations_pb2.ListOperationsResponse()
+ json_format.Parse(content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ async def _get_operation(
+ self,
+ request: operations_pb2.GetOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operations_pb2.Operation:
+ r"""Asynchronously call the get operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.GetOperationRequest):
+ The request object. The request message for
+ [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.operations_pb2.Operation:
+ This resource represents a long-
+ running operation that is the result of a
+ network API call.
+
+ """
+
+ http_options = [
+ {
+ "method": "get",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.GetOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.GetOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.GetOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+ content = await response.read()
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ # Return the response
+ api_response = operations_pb2.Operation()
+ json_format.Parse(content, api_response, ignore_unknown_fields=False)
+ return api_response
+
+ async def _delete_operation(
+ self,
+ request: operations_pb2.DeleteOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> empty_pb2.Empty:
+ r"""Asynchronously call the delete operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.DeleteOperationRequest):
+ The request object. The request message for
+ [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation].
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "delete",
+ "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+ },
+ ]
+ if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.DeleteOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.DeleteOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ content = await response.read()
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ return empty_pb2.Empty()
+
+ async def _cancel_operation(
+ self,
+ request: operations_pb2.CancelOperationRequest,
+ *,
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+ # to allow configuring retryable error codes.
+ retry=gapic_v1.method_async.DEFAULT,
+ timeout: Optional[float] = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ # TODO(https://github.com/googleapis/python-api-core/issues/722): Add `retry` parameter
+ # to allow configuring retryable error codes.
+ ) -> empty_pb2.Empty:
+ r"""Asynchronously call the cancel operation method over HTTP.
+
+ Args:
+ request (~.operations_pb2.CancelOperationRequest):
+ The request object. The request message for
+ [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+
+ http_options = [
+ {
+ "method": "post",
+ "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix),
+ "body": "*",
+ },
+ ]
+ if "google.longrunning.Operations.CancelOperation" in self._http_options:
+ http_options = self._http_options[
+ "google.longrunning.Operations.CancelOperation"
+ ]
+
+ request_kwargs = self._convert_protobuf_message_to_dict(request)
+ transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+ # Jsonify the request body
+ body_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["body"], body_request)
+ body = json_format.MessageToDict(
+ body_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+ uri = transcoded_request["uri"]
+ method = transcoded_request["method"]
+
+ # Jsonify the query params
+ query_params_request = operations_pb2.CancelOperationRequest()
+ json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+ query_params = json_format.MessageToDict(
+ query_params_request,
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False,
+ )
+
+ # Send the request
+ headers = dict(metadata)
+ headers["Content-Type"] = "application/json"
+ # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+ response = await getattr(self._session, method)(
+ "{host}{uri}".format(host=self._host, uri=uri),
+ timeout=timeout,
+ headers=headers,
+ params=rest_helpers.flatten_query_params(query_params),
+ data=body,
+ )
+
+ # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+ # subclass.
+ if response.status_code >= 400:
+ content = await response.read()
+ payload = json.loads(content.decode("utf-8"))
+ request_url = "{host}{uri}".format(host=self._host, uri=uri)
+ raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore
+
+ return empty_pb2.Empty()
+
+ @property
+ def list_operations(
+ self,
+ ) -> Callable[
+ [operations_pb2.ListOperationsRequest],
+ Coroutine[Any, Any, operations_pb2.ListOperationsResponse],
+ ]:
+ return self._list_operations
+
+ @property
+ def get_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.GetOperationRequest],
+ Coroutine[Any, Any, operations_pb2.Operation],
+ ]:
+ return self._get_operation
+
+ @property
+ def delete_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.DeleteOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
+ ]:
+ return self._delete_operation
+
+ @property
+ def cancel_operation(
+ self,
+ ) -> Callable[
+ [operations_pb2.CancelOperationRequest], Coroutine[Any, Any, empty_pb2.Empty]
+ ]:
+ return self._cancel_operation
+
+
+__all__ = ("AsyncOperationsRestTransport",)
diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py
index d777c5f8..30cd7c85 100644
--- a/google/api_core/protobuf_helpers.py
+++ b/google/api_core/protobuf_helpers.py
@@ -63,9 +63,7 @@ def from_any_pb(pb_type, any_pb):
# Unpack the Any object and populate the protobuf message instance.
if not any_pb.Unpack(msg_pb):
raise TypeError(
- "Could not convert {} to {}".format(
- any_pb.__class__.__name__, pb_type.__name__
- )
+ f"Could not convert `{any_pb.TypeName()}` with underlying type `google.protobuf.any_pb2.Any` to `{msg_pb.DESCRIPTOR.full_name}`"
)
# Done; return the message.
diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py
index 1606e0fe..263b4ccf 100644
--- a/google/api_core/retry/retry_base.py
+++ b/google/api_core/retry/retry_base.py
@@ -25,7 +25,7 @@
import time
from enum import Enum
-from typing import Any, Callable, Optional, TYPE_CHECKING
+from typing import Any, Callable, Optional, Iterator, TYPE_CHECKING
import requests.exceptions
@@ -174,7 +174,7 @@ def build_retry_error(
def _retry_error_helper(
exc: Exception,
deadline: float | None,
- next_sleep: float,
+ sleep_iterator: Iterator[float],
error_list: list[Exception],
predicate_fn: Callable[[Exception], bool],
on_error_fn: Callable[[Exception], None] | None,
@@ -183,7 +183,7 @@ def _retry_error_helper(
tuple[Exception, Exception | None],
],
original_timeout: float | None,
-):
+) -> float:
"""
Shared logic for handling an error for all retry implementations
@@ -194,13 +194,15 @@ def _retry_error_helper(
Args:
- exc: the exception that was raised
- deadline: the deadline for the retry, calculated as a diff from time.monotonic()
- - next_sleep: the next sleep interval
+ - sleep_iterator: iterator to draw the next backoff value from
- error_list: the list of exceptions that have been raised so far
- predicate_fn: takes `exc` and returns true if the operation should be retried
- on_error_fn: callback to execute when a retryable error occurs
- exc_factory_fn: callback used to build the exception to be raised on terminal failure
- original_timeout_val: the original timeout value for the retry (in seconds),
to be passed to the exception factory for building an error message
+ Returns:
+ - the sleep value chosen before the next attempt
"""
error_list.append(exc)
if not predicate_fn(exc):
@@ -212,6 +214,12 @@ def _retry_error_helper(
raise final_exc from source_exc
if on_error_fn is not None:
on_error_fn(exc)
+ # next_sleep is fetched after the on_error callback, to allow clients
+ # to update sleep_iterator values dynamically in response to errors
+ try:
+ next_sleep = next(sleep_iterator)
+ except StopIteration:
+ raise ValueError("Sleep generator stopped yielding sleep values.") from exc
if deadline is not None and time.monotonic() + next_sleep > deadline:
final_exc, source_exc = exc_factory_fn(
error_list,
@@ -222,6 +230,7 @@ def _retry_error_helper(
_LOGGER.debug(
"Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep)
)
+ return next_sleep
class _BaseRetry(object):
diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py
index e113323b..e4474c8a 100644
--- a/google/api_core/retry/retry_streaming.py
+++ b/google/api_core/retry/retry_streaming.py
@@ -59,8 +59,8 @@ def retry_target_stream(
[List[Exception], RetryFailureReason, Optional[float]],
Tuple[Exception, Optional[Exception]],
] = build_retry_error,
- init_args: _P.args = (),
- init_kwargs: _P.kwargs = {},
+ init_args: tuple = (),
+ init_kwargs: dict = {},
**kwargs,
) -> Generator[_Y, Any, None]:
"""Create a generator wrapper that retries the wrapped stream if it fails.
@@ -107,8 +107,11 @@ def retry_target_stream(
time.monotonic() + timeout if timeout is not None else None
)
error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
- for sleep in sleep_generator:
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
# Start a new retry loop
try:
# Note: in the future, we can add a ResumptionStrategy object
@@ -121,10 +124,10 @@ def retry_target_stream(
# This function explicitly must deal with broad exceptions.
except Exception as exc:
# defer to shared logic for handling errors
- _retry_error_helper(
+ next_sleep = _retry_error_helper(
exc,
deadline,
- sleep,
+ sleep_iter,
error_list,
predicate,
on_error,
@@ -132,9 +135,7 @@ def retry_target_stream(
timeout,
)
# if exception not raised, sleep before next attempt
- time.sleep(sleep)
-
- raise ValueError("Sleep generator stopped yielding sleep values.")
+ time.sleep(next_sleep)
class StreamingRetry(_BaseRetry):
diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py
index 2924ba14..5e5fa240 100644
--- a/google/api_core/retry/retry_streaming_async.py
+++ b/google/api_core/retry/retry_streaming_async.py
@@ -62,8 +62,8 @@ async def retry_target_stream(
[list[Exception], RetryFailureReason, float | None],
tuple[Exception, Exception | None],
] = build_retry_error,
- init_args: _P.args = (),
- init_kwargs: _P.kwargs = {},
+ init_args: tuple = (),
+ init_kwargs: dict = {},
**kwargs,
) -> AsyncGenerator[_Y, None]:
"""Create a generator wrapper that retries the wrapped stream if it fails.
@@ -109,9 +109,12 @@ async def retry_target_stream(
deadline = time.monotonic() + timeout if timeout else None
# keep track of retryable exceptions we encounter to pass in to exception_factory
error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
target_is_generator: bool | None = None
- for sleep in sleep_generator:
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
# Start a new retry loop
try:
# Note: in the future, we can add a ResumptionStrategy object
@@ -174,10 +177,10 @@ async def retry_target_stream(
# This function explicitly must deal with broad exceptions.
except Exception as exc:
# defer to shared logic for handling errors
- _retry_error_helper(
+ next_sleep = _retry_error_helper(
exc,
deadline,
- sleep,
+ sleep_iter,
error_list,
predicate,
on_error,
@@ -185,11 +188,11 @@ async def retry_target_stream(
timeout,
)
# if exception not raised, sleep before next attempt
- await asyncio.sleep(sleep)
+ await asyncio.sleep(next_sleep)
+
finally:
if target_is_generator and target_iterator is not None:
await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
- raise ValueError("Sleep generator stopped yielding sleep values.")
class AsyncStreamingRetry(_BaseRetry):
diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py
index ab1b4030..6d36bc7d 100644
--- a/google/api_core/retry/retry_unary.py
+++ b/google/api_core/retry/retry_unary.py
@@ -83,7 +83,7 @@ def check_if_exists():
def retry_target(
- target: Callable[_P, _R],
+ target: Callable[[], _R],
predicate: Callable[[Exception], bool],
sleep_generator: Iterable[float],
timeout: float | None = None,
@@ -138,8 +138,11 @@ def retry_target(
deadline = time.monotonic() + timeout if timeout is not None else None
error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
- for sleep in sleep_generator:
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
try:
result = target()
if inspect.isawaitable(result):
@@ -150,10 +153,10 @@ def retry_target(
# This function explicitly must deal with broad exceptions.
except Exception as exc:
# defer to shared logic for handling errors
- _retry_error_helper(
+ next_sleep = _retry_error_helper(
exc,
deadline,
- sleep,
+ sleep_iter,
error_list,
predicate,
on_error,
@@ -161,9 +164,7 @@ def retry_target(
timeout,
)
# if exception not raised, sleep before next attempt
- time.sleep(sleep)
-
- raise ValueError("Sleep generator stopped yielding sleep values.")
+ time.sleep(next_sleep)
class Retry(_BaseRetry):
diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py
index 3bdf6c71..1f72476a 100644
--- a/google/api_core/retry/retry_unary_async.py
+++ b/google/api_core/retry/retry_unary_async.py
@@ -94,7 +94,7 @@ async def check_if_exists():
async def retry_target(
- target: Callable[_P, Awaitable[_R]],
+ target: Callable[[], Awaitable[_R]],
predicate: Callable[[Exception], bool],
sleep_generator: Iterable[float],
timeout: float | None = None,
@@ -149,18 +149,21 @@ async def retry_target(
deadline = time.monotonic() + timeout if timeout is not None else None
error_list: list[Exception] = []
+ sleep_iter = iter(sleep_generator)
- for sleep in sleep_generator:
+ # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
+ # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
+ while True:
try:
return await target()
# pylint: disable=broad-except
# This function explicitly must deal with broad exceptions.
except Exception as exc:
# defer to shared logic for handling errors
- _retry_error_helper(
+ next_sleep = _retry_error_helper(
exc,
deadline,
- sleep,
+ sleep_iter,
error_list,
predicate,
on_error,
@@ -168,9 +171,7 @@ async def retry_target(
timeout,
)
# if exception not raised, sleep before next attempt
- await asyncio.sleep(sleep)
-
- raise ValueError("Sleep generator stopped yielding sleep values.")
+ await asyncio.sleep(next_sleep)
class AsyncRetry(_BaseRetry):
diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py
index 868e3e9f..55b195e9 100644
--- a/google/api_core/timeout.py
+++ b/google/api_core/timeout.py
@@ -102,8 +102,7 @@ def __call__(self, func):
def func_with_timeout(*args, **kwargs):
"""Wrapped function that adds timeout."""
- remaining_timeout = self._timeout
- if remaining_timeout is not None:
+ if self._timeout is not None:
# All calculations are in seconds
now_timestamp = self._clock().timestamp()
@@ -114,8 +113,19 @@ def func_with_timeout(*args, **kwargs):
now_timestamp = first_attempt_timestamp
time_since_first_attempt = now_timestamp - first_attempt_timestamp
- # Avoid setting negative timeout
- kwargs["timeout"] = max(0, self._timeout - time_since_first_attempt)
+ remaining_timeout = self._timeout - time_since_first_attempt
+
+ # Although the `deadline` parameter in `google.api_core.retry.Retry`
+ # is deprecated, and should be treated the same as the `timeout`,
+ # it is still possible for the `deadline` argument in
+ # `google.api_core.retry.Retry` to be larger than the `timeout`.
+ # See https://github.com/googleapis/python-api-core/issues/654
+ # Only positive non-zero timeouts are supported.
+ # Revert back to the initial timeout for negative or 0 timeout values.
+ if remaining_timeout < 1:
+ remaining_timeout = self._timeout
+
+ kwargs["timeout"] = remaining_timeout
return func(*args, **kwargs)
diff --git a/google/api_core/version.py b/google/api_core/version.py
index 9fea4fec..21cbec9f 100644
--- a/google/api_core/version.py
+++ b/google/api_core/version.py
@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-__version__ = "2.20.0"
+__version__ = "2.25.1"
diff --git a/mypy.ini b/mypy.ini
deleted file mode 100644
index ce33582a..00000000
--- a/mypy.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[mypy]
-python_version = 3.7
-namespace_packages = True
-ignore_missing_imports = True
diff --git a/noxfile.py b/noxfile.py
index 144e3e21..ac21330e 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -28,7 +28,7 @@
# Black and flake8 clash on the syntax for ignoring flake8's F401 in this file.
BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"]
-PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
+PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
DEFAULT_PYTHON_VERSION = "3.10"
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
@@ -124,7 +124,7 @@ def default(session, install_grpc=True, prerelease=False, install_async_rest=Fal
session.install(
"dataclasses",
- "mock",
+ "mock; python_version=='3.7'",
"pytest",
"pytest-cov",
"pytest-xdist",
@@ -132,6 +132,7 @@ def default(session, install_grpc=True, prerelease=False, install_async_rest=Fal
install_extras = []
if install_grpc:
+ # Note: The extra is called `grpc` and not `grpcio`.
install_extras.append("grpc")
constraints_dir = str(CURRENT_DIRECTORY / "testing")
@@ -273,15 +274,13 @@ def pytype(session):
@nox.session(python=DEFAULT_PYTHON_VERSION)
def mypy(session):
"""Run type-checking."""
- # TODO(https://github.com/googleapis/python-api-core/issues/682):
- # Use the latest version of mypy instead of mypy<1.11.0
- session.install(".[grpc,async_rest]", "mypy<1.11.0")
+ session.install(".[grpc,async_rest]", "mypy")
session.install(
"types-setuptools",
"types-requests",
"types-protobuf",
- "types-mock",
"types-dataclasses",
+ "types-mock; python_version=='3.7'",
)
session.run("mypy", "google", "tests")
diff --git a/owlbot.py b/owlbot.py
index c8c76542..58bc7517 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -26,7 +26,6 @@
excludes = [
"noxfile.py", # pytype
"setup.cfg", # pytype
- ".flake8", # flake8-import-order, layout
".coveragerc", # layout
"CONTRIBUTING.rst", # no systests
".github/workflows/unittest.yml", # exclude unittest gh action
@@ -36,18 +35,6 @@
templated_files = common.py_library(microgenerator=True, cov_level=100)
s.move(templated_files, excludes=excludes)
-# Add pytype support
-s.replace(
- ".gitignore",
- """\
-.pytest_cache
-""",
- """\
-.pytest_cache
-.pytype
-""",
-)
-
python.configure_previous_major_version_branches()
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..da404ab3
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,107 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "google-api-core"
+authors = [{ name = "Google LLC", email = "googleapis-packages@google.com" }]
+license = { text = "Apache 2.0" }
+requires-python = ">=3.7"
+readme = "README.rst"
+description = "Google API client core library"
+classifiers = [
+ # Should be one of:
+ # "Development Status :: 3 - Alpha"
+ # "Development Status :: 4 - Beta"
+ # "Development Status :: 5 - Production/Stable"
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.7",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Operating System :: OS Independent",
+ "Topic :: Internet",
+]
+dependencies = [
+ "googleapis-common-protos >= 1.56.2, < 2.0.0",
+ "protobuf >= 3.19.5, < 7.0.0, != 3.20.0, != 3.20.1, != 4.21.0, != 4.21.1, != 4.21.2, != 4.21.3, != 4.21.4, != 4.21.5",
+ "proto-plus >= 1.22.3, < 2.0.0",
+ "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'",
+ "google-auth >= 2.14.1, < 3.0.0",
+ "requests >= 2.18.0, < 3.0.0",
+]
+dynamic = ["version"]
+
+[project.urls]
+Documentation = "https://googleapis.dev/python/google-api-core/latest/"
+Repository = "https://github.com/googleapis/python-api-core"
+
+[project.optional-dependencies]
+async_rest = ["google-auth[aiohttp] >= 2.35.0, < 3.0.0"]
+grpc = [
+ "grpcio >= 1.33.2, < 2.0.0",
+ "grpcio >= 1.49.1, < 2.0.0; python_version >= '3.11'",
+ "grpcio-status >= 1.33.2, < 2.0.0",
+ "grpcio-status >= 1.49.1, < 2.0.0; python_version >= '3.11'",
+]
+grpcgcp = ["grpcio-gcp >= 0.2.2, < 1.0.0"]
+grpcio-gcp = ["grpcio-gcp >= 0.2.2, < 1.0.0"]
+
+[tool.setuptools.dynamic]
+version = { attr = "google.api_core.version.__version__" }
+
+[tool.setuptools.packages.find]
+# Only include packages under the 'google' namespace. Do not include tests,
+# benchmarks, etc.
+include = ["google*"]
+
+[tool.mypy]
+python_version = "3.7"
+namespace_packages = true
+ignore_missing_imports = true
+
+[tool.pytest]
+filterwarnings = [
+ # treat all warnings as errors
+ "error",
+ # Remove once https://github.com/pytest-dev/pytest-cov/issues/621 is fixed
+ "ignore:.*The --rsyncdir command line argument and rsyncdirs config variable are deprecated:DeprecationWarning",
+ # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed
+ "ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning",
+ # Remove once support for python 3.7 is dropped
+ # This warning only appears when using python 3.7
+ "ignore:.*Using or importing the ABCs from.*collections:DeprecationWarning",
+ # Remove once support for grpcio-gcp is deprecated
+ # See https://github.com/googleapis/python-api-core/blob/42e8b6e6f426cab749b34906529e8aaf3f133d75/google/api_core/grpc_helpers.py#L39-L45
+ "ignore:.*Support for grpcio-gcp is deprecated:DeprecationWarning",
+ "ignore: The `compression` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning",
+ "ignore:The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning",
+ # Remove once the minimum supported version of googleapis-common-protos is 1.62.0
+ "ignore:.*pkg_resources.declare_namespace:DeprecationWarning",
+ "ignore:.*pkg_resources is deprecated as an API:DeprecationWarning",
+ # Remove once https://github.com/grpc/grpc/issues/35086 is fixed (and version newer than 1.60.0 is published)
+ "ignore:There is no current event loop:DeprecationWarning",
+ # Remove after support for Python 3.7 is dropped
+ "ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning",
+]
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index 696548cf..00000000
--- a/pytest.ini
+++ /dev/null
@@ -1,23 +0,0 @@
-[pytest]
-filterwarnings =
- # treat all warnings as errors
- error
- # Remove once https://github.com/pytest-dev/pytest-cov/issues/621 is fixed
- ignore:.*The --rsyncdir command line argument and rsyncdirs config variable are deprecated:DeprecationWarning
- # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed
- ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning
- # Remove once support for python 3.7 is dropped
- # This warning only appears when using python 3.7
- ignore:.*Using or importing the ABCs from.*collections:DeprecationWarning
- # Remove once support for grpcio-gcp is deprecated
- # See https://github.com/googleapis/python-api-core/blob/42e8b6e6f426cab749b34906529e8aaf3f133d75/google/api_core/grpc_helpers.py#L39-L45
- ignore:.*Support for grpcio-gcp is deprecated:DeprecationWarning
- ignore: The `compression` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning
- ignore:The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning
- # Remove once the minimum supported version of googleapis-common-protos is 1.62.0
- ignore:.*pkg_resources.declare_namespace:DeprecationWarning
- ignore:.*pkg_resources is deprecated as an API:DeprecationWarning
- # Remove once https://github.com/grpc/grpc/issues/35086 is fixed (and version newer than 1.60.0 is published)
- ignore:There is no current event loop:DeprecationWarning
- # Remove after support for Python 3.7 is dropped
- ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning
diff --git a/renovate.json b/renovate.json
index 39b2a0ec..c7875c46 100644
--- a/renovate.json
+++ b/renovate.json
@@ -5,7 +5,7 @@
":preserveSemverRanges",
":disableDependencyDashboard"
],
- "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"],
+ "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"],
"pip_requirements": {
"fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"]
}
diff --git a/setup.py b/setup.py
index d3c2a2b4..168877fa 100644
--- a/setup.py
+++ b/setup.py
@@ -12,95 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import io
-import os
-
import setuptools
-# Package metadata.
-
-name = "google-api-core"
-description = "Google API client core library"
-
-# Should be one of:
-# 'Development Status :: 3 - Alpha'
-# 'Development Status :: 4 - Beta'
-# 'Development Status :: 5 - Production/Stable'
-release_status = "Development Status :: 5 - Production/Stable"
-dependencies = [
- "googleapis-common-protos >= 1.56.2, < 2.0.dev0",
- "protobuf>=3.19.5,<6.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
- "proto-plus >= 1.22.3, <2.0.0dev",
- "google-auth >= 2.14.1, < 3.0.dev0",
- "requests >= 2.18.0, < 3.0.0.dev0",
-]
-extras = {
- "async_rest": [
- "google-auth[aiohttp] >= 2.35.0, < 3.0.dev0",
- ],
- "grpc": [
- "grpcio >= 1.33.2, < 2.0dev",
- "grpcio >= 1.49.1, < 2.0dev; python_version>='3.11'",
- "grpcio-status >= 1.33.2, < 2.0.dev0",
- "grpcio-status >= 1.49.1, < 2.0.dev0; python_version>='3.11'",
- ],
- "grpcgcp": "grpcio-gcp >= 0.2.2, < 1.0.dev0",
- "grpcio-gcp": "grpcio-gcp >= 0.2.2, < 1.0.dev0",
-}
-
-
-# Setup boilerplate below this line.
-
-package_root = os.path.abspath(os.path.dirname(__file__))
-
-
-version = {}
-with open(os.path.join(package_root, "google/api_core/version.py")) as fp:
- exec(fp.read(), version)
-version = version["__version__"]
-
-readme_filename = os.path.join(package_root, "README.rst")
-with io.open(readme_filename, encoding="utf-8") as readme_file:
- readme = readme_file.read()
-
-# Only include packages under the 'google' namespace. Do not include tests,
-# benchmarks, etc.
-packages = [
- package
- for package in setuptools.find_namespace_packages()
- if package.startswith("google")
-]
-
-setuptools.setup(
- name=name,
- version=version,
- description=description,
- long_description=readme,
- author="Google LLC",
- author_email="googleapis-packages@google.com",
- license="Apache 2.0",
- url="https://github.com/googleapis/python-api-core",
- classifiers=[
- release_status,
- "Intended Audience :: Developers",
- "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python",
- "Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- "Programming Language :: Python :: 3.11",
- "Programming Language :: Python :: 3.12",
- "Operating System :: OS Independent",
- "Topic :: Internet",
- ],
- platforms="Posix; MacOS X; Windows",
- packages=packages,
- install_requires=dependencies,
- extras_require=extras,
- python_requires=">=3.7",
- include_package_data=True,
- zip_safe=False,
-)
+setuptools.setup()
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.14.txt b/testing/constraints-3.14.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/asyncio/future/test_async_future.py b/tests/asyncio/future/test_async_future.py
index 0cfe6773..659f41cf 100644
--- a/tests/asyncio/future/test_async_future.py
+++ b/tests/asyncio/future/test_async_future.py
@@ -13,8 +13,8 @@
# limitations under the License.
import asyncio
+from unittest import mock
-import mock
import pytest
from google.api_core import exceptions
diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py
index f64157b4..3edf8b6d 100644
--- a/tests/asyncio/gapic/test_method_async.py
+++ b/tests/asyncio/gapic/test_method_async.py
@@ -14,7 +14,11 @@
import datetime
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
try:
@@ -77,6 +81,7 @@ async def test_wrap_method_with_custom_client_info():
api_core_version=3,
gapic_version=4,
client_library_version=5,
+ protobuf_runtime_version=6,
)
fake_call = grpc_helpers_async.FakeUnaryUnaryCall()
method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call)
@@ -251,7 +256,11 @@ async def test_wrap_method_with_overriding_timeout_as_a_number():
result = await wrapped_method(timeout=22)
assert result == 42
- method.assert_called_once_with(timeout=22, metadata=mock.ANY)
+
+ actual_timeout = method.call_args[1]["timeout"]
+ metadata = method.call_args[1]["metadata"]
+ assert metadata == mock.ANY
+ assert actual_timeout == pytest.approx(22, abs=0.01)
@pytest.mark.asyncio
diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py
index 19ac9b56..e5b20dcd 100644
--- a/tests/asyncio/operations_v1/test_operations_async_client.py
+++ b/tests/asyncio/operations_v1/test_operations_async_client.py
@@ -12,7 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
+
import pytest
try:
diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py
index 28ae6ff1..e44f5361 100644
--- a/tests/asyncio/retry/test_retry_streaming_async.py
+++ b/tests/asyncio/retry/test_retry_streaming_async.py
@@ -12,11 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import asyncio
import datetime
import re
-import asyncio
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
import pytest
from google.api_core import exceptions
@@ -31,7 +36,36 @@ async def test_retry_streaming_target_bad_sleep_generator():
from google.api_core.retry.retry_streaming_async import retry_target_stream
with pytest.raises(ValueError, match="Sleep generator"):
- await retry_target_stream(None, None, [], None).__anext__()
+ await retry_target_stream(None, lambda x: True, [], None).__anext__()
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@pytest.mark.asyncio
+async def test_retry_streaming_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ from functools import partial
+ from google.api_core.retry.retry_streaming_async import retry_target_stream
+
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ error_target = partial(TestAsyncStreamingRetry._generator_mock, error_on=0)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
+ await retry_target_stream(
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
+ ).__anext__()
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
class TestAsyncStreamingRetry(Test_BaseRetry):
@@ -61,8 +95,8 @@ def if_exception_type(exc):
str(retry_),
)
+ @staticmethod
async def _generator_mock(
- self,
num=5,
error_on=None,
exceptions_seen=None,
@@ -82,7 +116,7 @@ async def _generator_mock(
for i in range(num):
if sleep_time:
await asyncio.sleep(sleep_time)
- if error_on and i == error_on:
+ if error_on is not None and i == error_on:
raise ValueError("generator mock error")
yield i
except (Exception, BaseException, GeneratorExit) as e:
@@ -134,6 +168,7 @@ async def test___call___generator_retry(self, sleep):
unpacked = [await generator.__anext__() for i in range(10)]
assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
assert on_error.call_count == 3
+ await generator.aclose()
@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
@mock.patch("asyncio.sleep", autospec=True)
@@ -241,6 +276,7 @@ async def _mock_send_gen():
recv = await generator.asend(msg)
out_messages.append(recv)
assert in_messages == out_messages
+ await generator.aclose()
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
@@ -258,6 +294,7 @@ async def test___call___generator_send_retry(self, sleep):
with pytest.raises(TypeError) as exc_info:
await generator.asend("cannot send to fresh generator")
assert exc_info.match("can't send non-None value")
+ await generator.aclose()
# error thrown on 3
# generator should contain 0, 1, 2 looping
@@ -266,6 +303,7 @@ async def test___call___generator_send_retry(self, sleep):
unpacked = [await generator.asend(i) for i in range(10)]
assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1]
assert on_error.call_count == 3
+ await generator.aclose()
@mock.patch("asyncio.sleep", autospec=True)
@pytest.mark.asyncio
@@ -377,6 +415,7 @@ async def wrapper():
assert await retryable.asend("test") == 1
assert await retryable.asend("test2") == 2
assert await retryable.asend("test3") == 3
+ await retryable.aclose()
@pytest.mark.parametrize("awaitable_wrapped", [True, False])
@mock.patch("asyncio.sleep", autospec=True)
diff --git a/tests/asyncio/retry/test_retry_unary_async.py b/tests/asyncio/retry/test_retry_unary_async.py
index fc2f572b..e7fdc963 100644
--- a/tests/asyncio/retry/test_retry_unary_async.py
+++ b/tests/asyncio/retry/test_retry_unary_async.py
@@ -15,7 +15,11 @@
import datetime
import re
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
from google.api_core import exceptions
@@ -132,9 +136,34 @@ async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg)
@pytest.mark.asyncio
async def test_retry_target_bad_sleep_generator():
with pytest.raises(ValueError, match="Sleep generator"):
+ await retry_async.retry_target(mock.sentinel.target, lambda x: True, [], None)
+
+
+@mock.patch("asyncio.sleep", autospec=True)
+@pytest.mark.asyncio
+async def test_retry_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ exception = ValueError("trigger retry")
+ error_target = mock.Mock(side_effect=exception)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
await retry_async.retry_target(
- mock.sentinel.target, mock.sentinel.predicate, [], None
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
)
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
class TestAsyncRetry(Test_BaseRetry):
diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py
index 1a408ccd..aa8d5d10 100644
--- a/tests/asyncio/test_grpc_helpers_async.py
+++ b/tests/asyncio/test_grpc_helpers_async.py
@@ -12,7 +12,11 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest # noqa: I202
try:
@@ -315,7 +319,7 @@ def test_awaitable_grpc_call():
"""
AwaitableGrpcCall type should be an Awaitable and a grpc.aio.Call.
"""
- instance = grpc_helpers_async.AwaitableGrpcCall[int]()
+ instance = grpc_helpers_async.AwaitableGrpcCall()
assert isinstance(instance, grpc.aio.Call)
# should implement __await__
assert hasattr(instance, "__await__")
diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py
index 127ba634..5b2f012b 100644
--- a/tests/asyncio/test_operation_async.py
+++ b/tests/asyncio/test_operation_async.py
@@ -13,9 +13,14 @@
# limitations under the License.
-import mock
import pytest
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
try:
import grpc # noqa: F401
except ImportError: # pragma: NO COVER
@@ -79,7 +84,8 @@ async def test_constructor():
assert await future.running()
-def test_metadata():
+@pytest.mark.asyncio
+async def test_metadata():
expected_metadata = struct_pb2.Struct()
future, _, _ = make_operation_future(
[make_operation_proto(metadata=expected_metadata)]
@@ -171,7 +177,8 @@ async def test_unexpected_result(unused_sleep):
assert "Unexpected state" in "{!r}".format(exception)
-def test_from_gapic():
+@pytest.mark.asyncio
+async def test_from_gapic():
operation_proto = make_operation_proto(done=True)
operations_client = mock.create_autospec(
operations_v1.OperationsClient, instance=True
diff --git a/tests/asyncio/test_page_iterator_async.py b/tests/asyncio/test_page_iterator_async.py
index 75f9e1cf..63e26d02 100644
--- a/tests/asyncio/test_page_iterator_async.py
+++ b/tests/asyncio/test_page_iterator_async.py
@@ -14,7 +14,11 @@
import inspect
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
import pytest
from google.api_core import page_iterator_async
@@ -106,6 +110,7 @@ async def test__page_aiter_increment(self):
await page_aiter.__anext__()
assert iterator.num_results == 1
+ await page_aiter.aclose()
@pytest.mark.asyncio
async def test__page_aiter_no_increment(self):
@@ -118,6 +123,7 @@ async def test__page_aiter_no_increment(self):
# results should still be 0 after fetching a page.
assert iterator.num_results == 0
+ await page_aiter.aclose()
@pytest.mark.asyncio
async def test__items_aiter(self):
diff --git a/tests/asyncio/test_rest_streaming_async.py b/tests/asyncio/test_rest_streaming_async.py
index da5b1c8d..c9caa2b1 100644
--- a/tests/asyncio/test_rest_streaming_async.py
+++ b/tests/asyncio/test_rest_streaming_async.py
@@ -15,15 +15,20 @@
# TODO: set random.seed explicitly in each test function.
# See related issue: https://github.com/googleapis/python-api-core/issues/689.
-import pytest # noqa: I202
-import mock
-
import datetime
import logging
import random
import time
from typing import List, AsyncIterator
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
+import pytest # noqa: I202
+
import proto
try:
diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py
index 98afc599..a37efdd4 100644
--- a/tests/unit/future/test__helpers.py
+++ b/tests/unit/future/test__helpers.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
from google.api_core.future import _helpers
diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py
index f5d9b4f1..2f66f230 100644
--- a/tests/unit/future/test_polling.py
+++ b/tests/unit/future/test_polling.py
@@ -15,8 +15,8 @@
import concurrent.futures
import threading
import time
+from unittest import mock
-import mock
import pytest
from google.api_core import exceptions, retry
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
index d966f478..c27de64e 100644
--- a/tests/unit/gapic/test_method.py
+++ b/tests/unit/gapic/test_method.py
@@ -13,8 +13,8 @@
# limitations under the License.
import datetime
+from unittest import mock
-import mock
import pytest
try:
@@ -76,6 +76,7 @@ def test_wrap_method_with_custom_client_info():
api_core_version=3,
gapic_version=4,
client_library_version=5,
+ protobuf_runtime_version=6,
)
method = mock.Mock(spec=["__call__"])
@@ -200,7 +201,11 @@ def test_wrap_method_with_overriding_timeout_as_a_number():
result = wrapped_method(timeout=22)
assert result == 42
- method.assert_called_once_with(timeout=22, metadata=mock.ANY)
+
+ actual_timeout = method.call_args[1]["timeout"]
+ metadata = method.call_args[1]["metadata"]
+ assert metadata == mock.ANY
+ assert actual_timeout == pytest.approx(22, abs=0.01)
def test_wrap_method_with_call():
diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py
index 4ab4f1f7..d1f6e0eb 100644
--- a/tests/unit/operations_v1/test_operations_rest_client.py
+++ b/tests/unit/operations_v1/test_operations_rest_client.py
@@ -15,23 +15,31 @@
#
import os
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
import pytest
+from typing import Any, List
try:
import grpc # noqa: F401
except ImportError: # pragma: NO COVER
pytest.skip("No GRPC", allow_module_level=True)
from requests import Response # noqa I201
-from requests.sessions import Session
+from google.auth.transport.requests import AuthorizedSession
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core.operations_v1 import AbstractOperationsClient
+
+import google.auth
from google.api_core.operations_v1 import pagers
+from google.api_core.operations_v1 import pagers_async
from google.api_core.operations_v1 import transports
-import google.auth
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.longrunning import operations_pb2
@@ -39,6 +47,16 @@
from google.protobuf import json_format # type: ignore
from google.rpc import status_pb2 # type: ignore
+try:
+ import aiohttp # noqa: F401
+ import google.auth.aio.transport
+ from google.auth.aio.transport.sessions import AsyncAuthorizedSession
+ from google.api_core.operations_v1 import AsyncOperationsRestClient
+ from google.auth.aio import credentials as ga_credentials_async
+
+ GOOGLE_AUTH_AIO_INSTALLED = True
+except ImportError:
+ GOOGLE_AUTH_AIO_INSTALLED = False
HTTP_OPTIONS = {
"google.longrunning.Operations.CancelOperation": [
@@ -55,17 +73,62 @@
],
}
+PYPARAM_CLIENT: List[Any] = [
+ AbstractOperationsClient,
+]
+PYPARAM_CLIENT_TRANSPORT_NAME = [
+ [AbstractOperationsClient, transports.OperationsRestTransport, "rest"],
+]
+PYPARAM_CLIENT_TRANSPORT_CREDENTIALS = [
+ [
+ AbstractOperationsClient,
+ transports.OperationsRestTransport,
+ ga_credentials.AnonymousCredentials(),
+ ],
+]
+
+if GOOGLE_AUTH_AIO_INSTALLED:
+ PYPARAM_CLIENT.append(AsyncOperationsRestClient)
+ PYPARAM_CLIENT_TRANSPORT_NAME.append(
+ [
+ AsyncOperationsRestClient,
+ transports.AsyncOperationsRestTransport,
+ "rest_asyncio",
+ ]
+ )
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS.append(
+ [
+ AsyncOperationsRestClient,
+ transports.AsyncOperationsRestTransport,
+ ga_credentials_async.AnonymousCredentials(),
+ ]
+ )
+
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
-def _get_operations_client(http_options=HTTP_OPTIONS):
- transport = transports.rest.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
+def _get_session_type(is_async: bool):
+ return (
+ AsyncAuthorizedSession
+ if is_async and GOOGLE_AUTH_AIO_INSTALLED
+ else AuthorizedSession
)
- return AbstractOperationsClient(transport=transport)
+
+def _get_operations_client(is_async: bool, http_options=HTTP_OPTIONS):
+ if is_async and GOOGLE_AUTH_AIO_INSTALLED:
+ async_transport = transports.rest_asyncio.AsyncOperationsRestTransport(
+ credentials=ga_credentials_async.AnonymousCredentials(),
+ http_options=http_options,
+ )
+ return AsyncOperationsRestClient(transport=async_transport)
+ else:
+ sync_transport = transports.rest.OperationsRestTransport(
+ credentials=ga_credentials.AnonymousCredentials(), http_options=http_options
+ )
+ return AbstractOperationsClient(transport=sync_transport)
# If default endpoint is localhost, then default mtls endpoint will be the same.
@@ -79,57 +142,69 @@ def modify_default_endpoint(client):
)
-def test__get_default_mtls_endpoint():
+# TODO: Add support for mtls in async rest
+@pytest.mark.parametrize(
+ "client_class",
+ [
+ AbstractOperationsClient,
+ ],
+)
+def test__get_default_mtls_endpoint(client_class):
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
- assert AbstractOperationsClient._get_default_mtls_endpoint(None) is None
+ assert client_class._get_default_mtls_endpoint(None) is None
+ assert client_class._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
assert (
- AbstractOperationsClient._get_default_mtls_endpoint(api_endpoint)
- == api_mtls_endpoint
+ client_class._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint
)
assert (
- AbstractOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint)
- == api_mtls_endpoint
- )
- assert (
- AbstractOperationsClient._get_default_mtls_endpoint(sandbox_endpoint)
+ client_class._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
- AbstractOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ client_class._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
- assert (
- AbstractOperationsClient._get_default_mtls_endpoint(non_googleapi)
- == non_googleapi
- )
+ assert client_class._get_default_mtls_endpoint(non_googleapi) == non_googleapi
-@pytest.mark.parametrize("client_class", [AbstractOperationsClient])
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
def test_operations_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_info"
- ) as factory:
- factory.return_value = creds
- info = {"valid": True}
- client = client_class.from_service_account_info(info)
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
+ if "async" in str(client_class):
+ # TODO(): Add support for service account info to async REST transport.
+ with pytest.raises(NotImplementedError):
+ info = {"valid": True}
+ client_class.from_service_account_info(info)
+ else:
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
- assert client.transport._host == "https://longrunning.googleapis.com"
+ assert client.transport._host == "https://longrunning.googleapis.com"
@pytest.mark.parametrize(
- "transport_class,transport_name", [(transports.OperationsRestTransport, "rest")]
+ "transport_class",
+ [
+ transports.OperationsRestTransport,
+ # TODO(https://github.com/googleapis/python-api-core/issues/706): Add support for
+ # service account credentials in transports.AsyncOperationsRestTransport
+ ],
)
-def test_operations_client_service_account_always_use_jwt(
- transport_class, transport_name
-):
+def test_operations_client_service_account_always_use_jwt(transport_class):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
@@ -145,35 +220,53 @@ def test_operations_client_service_account_always_use_jwt(
use_jwt.assert_not_called()
-@pytest.mark.parametrize("client_class", [AbstractOperationsClient])
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
def test_operations_client_from_service_account_file(client_class):
- creds = ga_credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = client_class.from_service_account_file("dummy/file/path.json")
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
- client = client_class.from_service_account_json("dummy/file/path.json")
- assert client.transport._credentials == creds
- assert isinstance(client, client_class)
+ if "async" in str(client_class):
+ # TODO(): Add support for service account creds to async REST transport.
+ with pytest.raises(NotImplementedError):
+ client_class.from_service_account_file("dummy/file/path.json")
+ else:
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
- assert client.transport._host == "https://longrunning.googleapis.com"
+ assert client.transport._host == "https://longrunning.googleapis.com"
-def test_operations_client_get_transport_class():
- transport = AbstractOperationsClient.get_transport_class()
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ PYPARAM_CLIENT_TRANSPORT_NAME,
+)
+def test_operations_client_get_transport_class(
+ client_class, transport_class, transport_name
+):
+ transport = client_class.get_transport_class()
available_transports = [
transports.OperationsRestTransport,
]
+ if GOOGLE_AUTH_AIO_INSTALLED:
+ available_transports.append(transports.AsyncOperationsRestTransport)
assert transport in available_transports
- transport = AbstractOperationsClient.get_transport_class("rest")
- assert transport == transports.OperationsRestTransport
+ transport = client_class.get_transport_class(transport_name)
+ assert transport == transport_class
+# TODO(): Update this test case to include async REST once we have support for MTLS.
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
@@ -186,22 +279,21 @@ def test_operations_client_get_transport_class():
def test_operations_client_client_options(
client_class, transport_class, transport_name
):
- # Check that if channel is provided we won't create a new one.
- with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
- transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
- client = client_class(transport=transport)
- gtc.assert_not_called()
+ # # Check that if channel is provided we won't create a new one.
+ # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
+ # client = client_class(transport=transport_class())
+ # gtc.assert_not_called()
- # Check that if channel is provided via str we will create a new one.
- with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc:
- client = client_class(transport=transport_name)
- gtc.assert_called()
+ # # Check that if channel is provided via str we will create a new one.
+ # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc:
+ # client = client_class(transport=transport_name)
+ # gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class(client_options=options)
+ client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -218,7 +310,7 @@ def test_operations_client_client_options(
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class()
+ client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -235,7 +327,7 @@ def test_operations_client_client_options(
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class()
+ client = client_class(transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -264,7 +356,7 @@ def test_operations_client_client_options(
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
- client = client_class(client_options=options)
+ client = client_class(client_options=options, transport=transport_name)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
@@ -277,6 +369,7 @@ def test_operations_client_client_options(
)
+# TODO: Add support for mtls in async REST
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
@@ -393,7 +486,7 @@ def fake_init(client_cert_source_for_mtls=None, **kwargs):
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
- [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+ PYPARAM_CLIENT_TRANSPORT_NAME,
)
def test_operations_client_client_options_scopes(
client_class, transport_class, transport_name
@@ -402,52 +495,59 @@ def test_operations_client_client_options_scopes(
options = client_options.ClientOptions(
scopes=["1", "2"],
)
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=["1", "2"],
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
+ if "async" in str(client_class):
+ # TODO(): Add support for scopes to async REST transport.
+ with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
+ client_class(client_options=options, transport=transport_name)
+ else:
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
- [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")],
+ PYPARAM_CLIENT_TRANSPORT_NAME,
)
def test_operations_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file="credentials.json",
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- client_cert_source_for_mtls=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- always_use_jwt_access=True,
- )
-
+ if "async" in str(client_class):
+ # TODO(): Add support for credentials file to async REST transport.
+ with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError):
+ client_class(client_options=options, transport=transport_name)
+ else:
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options, transport=transport_name)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
+ )
-def test_list_operations_rest(
- transport: str = "rest", request_type=operations_pb2.ListOperationsRequest
-):
- client = _get_operations_client()
+def test_list_operations_rest():
+ client = _get_operations_client(is_async=False)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Designate an appropriate value for the returned response.
return_value = operations_pb2.ListOperationsResponse(
next_page_token="next_page_token_value",
@@ -477,10 +577,49 @@ def test_list_operations_rest(
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_operations_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+
+ client = _get_operations_client(is_async=True)
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.ListOperationsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ response = await client.list_operations(
+ name="operations", filter_="my_filter", page_size=10, page_token="abc"
+ )
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations"
+ assert actual_args.kwargs["params"] == [
+ ("filter", "my_filter"),
+ ("pageSize", 10),
+ ("pageToken", "abc"),
+ ]
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers_async.ListOperationsAsyncPager)
+ assert response.next_page_token == "next_page_token_value"
+
+
def test_list_operations_rest_failure():
- client = _get_operations_client(http_options=None)
+ client = _get_operations_client(is_async=False, http_options=None)
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
@@ -492,13 +631,31 @@ def test_list_operations_rest_failure():
client.list_operations(name="operations")
+@pytest.mark.asyncio
+async def test_list_operations_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com:443/v1/operations"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.list_operations(name="operations")
+
+
def test_list_operations_rest_pager():
- client = AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ client = _get_operations_client(is_async=False, http_options=None)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# TODO(kbandes): remove this mock unless there's a good reason for it.
# with mock.patch.object(path_template, 'transcode') as transcode:
# Set the response as a series of pages
@@ -545,13 +702,80 @@ def test_list_operations_rest_pager():
assert page_.next_page_token == token
-def test_get_operation_rest(
- transport: str = "rest", request_type=operations_pb2.GetOperationRequest
-):
- client = _get_operations_client()
+@pytest.mark.asyncio
+async def test_list_operations_rest_pager_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode:
+ # Set the response as a series of pages
+ response = (
+ operations_pb2.ListOperationsResponse(
+ operations=[
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ operations_pb2.Operation(),
+ ],
+ next_page_token="abc",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[],
+ next_page_token="def",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation()],
+ next_page_token="ghi",
+ ),
+ operations_pb2.ListOperationsResponse(
+ operations=[operations_pb2.Operation(), operations_pb2.Operation()],
+ ),
+ )
+ # Two responses for two calls
+ response = response + response
+
+ # Wrap the values into proper Response objs
+ response = tuple(json_format.MessageToJson(x) for x in response)
+ return_values = tuple(mock.Mock() for i in response)
+ for return_val, response_val in zip(return_values, response):
+ return_val.read = mock.AsyncMock(return_value=response_val.encode("UTF-8"))
+ return_val.status_code = 200
+ req.side_effect = return_values
+
+ pager = await client.list_operations(name="operations")
+
+ responses = []
+ async for response in pager:
+ responses.append(response)
+
+ results = list(responses)
+ assert len(results) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+ pager = await client.list_operations(name="operations")
+
+ responses = []
+ async for response in pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, operations_pb2.Operation) for i in results)
+
+ pages = []
+
+ async for page in pager.pages:
+ pages.append(page)
+ for page_, token in zip(pages, ["", "", "", "abc", "def", "ghi", ""]):
+ assert page_.next_page_token == token
+
+
+def test_get_operation_rest():
+ client = _get_operations_client(is_async=False)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Designate an appropriate value for the returned response.
return_value = operations_pb2.Operation(
name="operations/sample1",
@@ -580,10 +804,46 @@ def test_get_operation_rest(
assert response.done is True
+@pytest.mark.asyncio
+async def test_get_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = operations_pb2.Operation(
+ name="operations/sample1",
+ done=True,
+ error=status_pb2.Status(code=411),
+ )
+
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value.read = mock.AsyncMock(return_value=json_return_value)
+ req.return_value = response_value
+ response = await client.get_operation("operations/sample1")
+
+ actual_args = req.call_args
+ assert actual_args.args[0] == "GET"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, operations_pb2.Operation)
+ assert response.name == "operations/sample1"
+ assert response.done is True
+
+
def test_get_operation_rest_failure():
- client = _get_operations_client(http_options=None)
+ client = _get_operations_client(is_async=False, http_options=None)
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
@@ -595,13 +855,30 @@ def test_get_operation_rest_failure():
client.get_operation("sample0/operations/sample1")
-def test_delete_operation_rest(
- transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest
-):
- client = _get_operations_client()
+@pytest.mark.asyncio
+async def test_get_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "GET"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.get_operation("sample0/operations/sample1")
+
+
+def test_delete_operation_rest():
+ client = _get_operations_client(is_async=False)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
@@ -618,10 +895,36 @@ def test_delete_operation_rest(
)
+@pytest.mark.asyncio
+async def test_delete_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ await client.delete_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "DELETE"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1"
+ )
+
+
def test_delete_operation_rest_failure():
- client = _get_operations_client(http_options=None)
+ client = _get_operations_client(is_async=False, http_options=None)
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
@@ -633,11 +936,30 @@ def test_delete_operation_rest_failure():
client.delete_operation(name="sample0/operations/sample1")
-def test_cancel_operation_rest(transport: str = "rest"):
- client = _get_operations_client()
+@pytest.mark.asyncio
+async def test_delete_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "DELETE"
+ mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1"
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.delete_operation(name="sample0/operations/sample1")
+
+
+def test_cancel_operation_rest():
+ client = _get_operations_client(is_async=False)
# Mock the http request call within the method and fake a response.
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
# Wrap the value into a proper Response obj
response_value = Response()
response_value.status_code = 200
@@ -654,10 +976,36 @@ def test_cancel_operation_rest(transport: str = "rest"):
)
+@pytest.mark.asyncio
+async def test_cancel_operation_rest_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True)
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ # Wrap the value into a proper Response obj
+ response_value = mock.Mock()
+ response_value.status_code = 200
+ json_return_value = ""
+ response_value.read = mock.AsyncMock(
+ return_value=json_return_value.encode("UTF-8")
+ )
+ req.return_value = response_value
+ await client.cancel_operation(name="operations/sample1")
+ assert req.call_count == 1
+ actual_args = req.call_args
+ assert actual_args.args[0] == "POST"
+ assert (
+ actual_args.args[1]
+ == "https://longrunning.googleapis.com/v3/operations/sample1:cancel"
+ )
+
+
def test_cancel_operation_rest_failure():
- client = _get_operations_client(http_options=None)
+ client = _get_operations_client(is_async=False, http_options=None)
- with mock.patch.object(Session, "request") as req:
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req:
response_value = Response()
response_value.status_code = 400
mock_request = mock.MagicMock()
@@ -671,52 +1019,79 @@ def test_cancel_operation_rest_failure():
client.cancel_operation(name="sample0/operations/sample1")
-def test_credentials_transport_error():
+@pytest.mark.asyncio
+async def test_cancel_operation_rest_failure_async():
+ if not GOOGLE_AUTH_AIO_INSTALLED:
+ pytest.skip("Skipped because google-api-core[async_rest] is not installed")
+ client = _get_operations_client(is_async=True, http_options=None)
+
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req:
+ response_value = mock.Mock()
+ response_value.status_code = 400
+ response_value.read = mock.AsyncMock(return_value=b"{}")
+ mock_request = mock.MagicMock()
+ mock_request.method = "POST"
+ mock_request.url = (
+ "https://longrunning.googleapis.com/v1/operations/sample1:cancel"
+ )
+ response_value.request = mock_request
+ req.return_value = response_value
+ with pytest.raises(core_exceptions.GoogleAPIError):
+ await client.cancel_operation(name="sample0/operations/sample1")
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_credentials_transport_error(client_class, transport_class, credentials):
+
# It is an error to provide credentials and a transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ transport = transport_class(credentials=credentials)
with pytest.raises(ValueError):
- AbstractOperationsClient(
+ client_class(
credentials=ga_credentials.AnonymousCredentials(),
transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ transport = transport_class(credentials=credentials)
with pytest.raises(ValueError):
- AbstractOperationsClient(
+ client_class(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
- )
+ transport = transport_class(credentials=credentials)
with pytest.raises(ValueError):
- AbstractOperationsClient(
+ client_class(
client_options={"scopes": ["1", "2"]},
transport=transport,
)
-def test_transport_instance():
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_transport_instance(client_class, transport_class, credentials):
# A client may be instantiated with a custom transport instance.
- transport = transports.OperationsRestTransport(
- credentials=ga_credentials.AnonymousCredentials(),
+ transport = transport_class(
+ credentials=credentials,
)
- client = AbstractOperationsClient(transport=transport)
+ client = client_class(transport=transport)
assert client.transport is transport
-@pytest.mark.parametrize("transport_class", [transports.OperationsRestTransport])
-def test_transport_adc(transport_class):
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_transport_adc(client_class, transport_class, credentials):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
- adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+ adc.return_value = (credentials, None)
transport_class()
adc.assert_called_once()
@@ -788,32 +1163,59 @@ def test_operations_base_transport_with_adc():
adc.assert_called_once()
-def test_operations_auth_adc():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_operations_auth_adc(client_class):
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
- AbstractOperationsClient()
- adc.assert_called_once_with(
- scopes=None,
- default_scopes=(),
- quota_project_id=None,
- )
+
+ if "async" in str(client_class).lower():
+ # TODO(): Add support for adc to async REST transport.
+ # NOTE: Ideally, the logic for adc shouldn't be called if transport
+ # is set to async REST. If the user does not configure credentials
+ # of type `google.auth.aio.credentials.Credentials`,
+ # we should raise an exception to avoid the adc workflow.
+ with pytest.raises(google.auth.exceptions.InvalidType):
+ client_class()
+ else:
+ client_class()
+ adc.assert_called_once_with(
+ scopes=None,
+ default_scopes=(),
+ quota_project_id=None,
+ )
-def test_operations_http_transport_client_cert_source_for_mtls():
+# TODO(https://github.com/googleapis/python-api-core/issues/705): Add
+# testing for `transports.AsyncOperationsRestTransport` once MTLS is supported
+# in `google.auth.aio.transport`.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.OperationsRestTransport,
+ ],
+)
+def test_operations_http_transport_client_cert_source_for_mtls(transport_class):
cred = ga_credentials.AnonymousCredentials()
with mock.patch(
"google.auth.transport.requests.AuthorizedSession.configure_mtls_channel"
) as mock_configure_mtls_channel:
- transports.OperationsRestTransport(
+ transport_class(
credentials=cred, client_cert_source_for_mtls=client_cert_source_callback
)
mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback)
-def test_operations_host_no_port():
- client = AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_operations_host_no_port(client_class, transport_class, credentials):
+ client = client_class(
+ credentials=credentials,
client_options=client_options.ClientOptions(
api_endpoint="longrunning.googleapis.com"
),
@@ -821,9 +1223,13 @@ def test_operations_host_no_port():
assert client.transport._host == "https://longrunning.googleapis.com"
-def test_operations_host_with_port():
- client = AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_operations_host_with_port(client_class, transport_class, credentials):
+ client = client_class(
+ credentials=credentials,
client_options=client_options.ClientOptions(
api_endpoint="longrunning.googleapis.com:8000"
),
@@ -831,127 +1237,165 @@ def test_operations_host_with_port():
assert client.transport._host == "https://longrunning.googleapis.com:8000"
-def test_common_billing_account_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_billing_account_path(client_class):
billing_account = "squid"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
- actual = AbstractOperationsClient.common_billing_account_path(billing_account)
+ actual = client_class.common_billing_account_path(billing_account)
assert expected == actual
-def test_parse_common_billing_account_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_billing_account_path(client_class):
expected = {
"billing_account": "clam",
}
- path = AbstractOperationsClient.common_billing_account_path(**expected)
+ path = client_class.common_billing_account_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_billing_account_path(path)
+ actual = client_class.parse_common_billing_account_path(path)
assert expected == actual
-def test_common_folder_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_folder_path(client_class):
folder = "whelk"
expected = "folders/{folder}".format(
folder=folder,
)
- actual = AbstractOperationsClient.common_folder_path(folder)
+ actual = client_class.common_folder_path(folder)
assert expected == actual
-def test_parse_common_folder_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_folder_path(client_class):
expected = {
"folder": "octopus",
}
- path = AbstractOperationsClient.common_folder_path(**expected)
+ path = client_class.common_folder_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_folder_path(path)
+ actual = client_class.parse_common_folder_path(path)
assert expected == actual
-def test_common_organization_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_organization_path(client_class):
organization = "oyster"
expected = "organizations/{organization}".format(
organization=organization,
)
- actual = AbstractOperationsClient.common_organization_path(organization)
+ actual = client_class.common_organization_path(organization)
assert expected == actual
-def test_parse_common_organization_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_organization_path(client_class):
expected = {
"organization": "nudibranch",
}
- path = AbstractOperationsClient.common_organization_path(**expected)
+ path = client_class.common_organization_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_organization_path(path)
+ actual = client_class.parse_common_organization_path(path)
assert expected == actual
-def test_common_project_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_project_path(client_class):
project = "cuttlefish"
expected = "projects/{project}".format(
project=project,
)
- actual = AbstractOperationsClient.common_project_path(project)
+ actual = client_class.common_project_path(project)
assert expected == actual
-def test_parse_common_project_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_project_path(client_class):
expected = {
"project": "mussel",
}
- path = AbstractOperationsClient.common_project_path(**expected)
+ path = client_class.common_project_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_project_path(path)
+ actual = client_class.parse_common_project_path(path)
assert expected == actual
-def test_common_location_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_common_location_path(client_class):
project = "winkle"
location = "nautilus"
expected = "projects/{project}/locations/{location}".format(
project=project,
location=location,
)
- actual = AbstractOperationsClient.common_location_path(project, location)
+ actual = client_class.common_location_path(project, location)
assert expected == actual
-def test_parse_common_location_path():
+@pytest.mark.parametrize(
+ "client_class",
+ PYPARAM_CLIENT,
+)
+def test_parse_common_location_path(client_class):
expected = {
"project": "scallop",
"location": "abalone",
}
- path = AbstractOperationsClient.common_location_path(**expected)
+ path = client_class.common_location_path(**expected)
# Check that the path construction is reversible.
- actual = AbstractOperationsClient.parse_common_location_path(path)
+ actual = client_class.parse_common_location_path(path)
assert expected == actual
-def test_client_withDEFAULT_CLIENT_INFO():
+@pytest.mark.parametrize(
+ "client_class,transport_class,credentials",
+ PYPARAM_CLIENT_TRANSPORT_CREDENTIALS,
+)
+def test_client_withDEFAULT_CLIENT_INFO(client_class, transport_class, credentials):
client_info = gapic_v1.client_info.ClientInfo()
-
- with mock.patch.object(
- transports.OperationsTransport, "_prep_wrapped_messages"
- ) as prep:
- AbstractOperationsClient(
- credentials=ga_credentials.AnonymousCredentials(),
+ with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
+ client_class(
+ credentials=credentials,
client_info=client_info,
)
prep.assert_called_once_with(client_info)
- with mock.patch.object(
- transports.OperationsTransport, "_prep_wrapped_messages"
- ) as prep:
- transport_class = AbstractOperationsClient.get_transport_class()
+ with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep:
transport_class(
- credentials=ga_credentials.AnonymousCredentials(),
+ credentials=credentials,
client_info=client_info,
)
prep.assert_called_once_with(client_info)
diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py
index a0c6776b..212c4293 100644
--- a/tests/unit/retry/test_retry_base.py
+++ b/tests/unit/retry/test_retry_base.py
@@ -14,8 +14,8 @@
import itertools
import re
+from unittest import mock
-import mock
import pytest
import requests.exceptions
diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py
index 01f35327..2499b2ae 100644
--- a/tests/unit/retry/test_retry_streaming.py
+++ b/tests/unit/retry/test_retry_streaming.py
@@ -14,7 +14,12 @@
import re
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
import pytest
from google.api_core import exceptions
@@ -28,7 +33,36 @@ def test_retry_streaming_target_bad_sleep_generator():
with pytest.raises(
ValueError, match="Sleep generator stopped yielding sleep values"
):
- next(retry_streaming.retry_target_stream(None, None, [], None))
+ next(retry_streaming.retry_target_stream(None, lambda x: True, [], None))
+
+
+@mock.patch("time.sleep", autospec=True)
+def test_retry_streaming_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ from functools import partial
+
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ error_target = partial(TestStreamingRetry._generator_mock, error_on=0)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
+ next(
+ retry_streaming.retry_target_stream(
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
+ )
+ )
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
class TestStreamingRetry(Test_BaseRetry):
@@ -58,8 +92,8 @@ def if_exception_type(exc):
str(retry_),
)
+ @staticmethod
def _generator_mock(
- self,
num=5,
error_on=None,
return_val=None,
@@ -77,7 +111,7 @@ def _generator_mock(
"""
try:
for i in range(num):
- if error_on and i == error_on:
+ if error_on is not None and i == error_on:
raise ValueError("generator mock error")
yield i
return return_val
diff --git a/tests/unit/retry/test_retry_unary.py b/tests/unit/retry/test_retry_unary.py
index 7dcd8dd6..f5bbcff7 100644
--- a/tests/unit/retry/test_retry_unary.py
+++ b/tests/unit/retry/test_retry_unary.py
@@ -13,10 +13,14 @@
# limitations under the License.
import datetime
+import pytest
import re
-import mock
-import pytest
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
from google.api_core import exceptions
from google.api_core import retry
@@ -142,7 +146,33 @@ def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg):
def test_retry_target_bad_sleep_generator():
with pytest.raises(ValueError, match="Sleep generator"):
- retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None)
+ retry.retry_target(mock.sentinel.target, lambda x: True, [], None)
+
+
+@mock.patch("time.sleep", autospec=True)
+def test_retry_target_dynamic_backoff(sleep):
+ """
+ sleep_generator should be iterated after on_error, to support dynamic backoff
+ """
+ sleep.side_effect = RuntimeError("stop after sleep")
+ # start with empty sleep generator; values are added after exception in push_sleep_value
+ sleep_values = []
+ exception = ValueError("trigger retry")
+ error_target = mock.Mock(side_effect=exception)
+ inserted_sleep = 99
+
+ def push_sleep_value(err):
+ sleep_values.append(inserted_sleep)
+
+ with pytest.raises(RuntimeError):
+ retry.retry_target(
+ error_target,
+ predicate=lambda x: True,
+ sleep_generator=sleep_values,
+ on_error=push_sleep_value,
+ )
+ assert sleep.call_count == 1
+ sleep.assert_called_once_with(inserted_sleep)
class TestRetry(Test_BaseRetry):
diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py
index 84ac9dc5..7640367c 100644
--- a/tests/unit/test_bidi.py
+++ b/tests/unit/test_bidi.py
@@ -16,8 +16,14 @@
import logging
import queue
import threading
+import time
+
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
-import mock
import pytest
try:
@@ -291,6 +297,9 @@ def test_close(self):
# ensure the request queue was signaled to stop.
assert bidi_rpc.pending_requests == 1
assert bidi_rpc._request_queue.get() is None
+ # ensure request and callbacks are cleaned up
+ assert bidi_rpc._initial_request is None
+ assert not bidi_rpc._callbacks
def test_close_no_rpc(self):
bidi_rpc = bidi.BidiRpc(None)
@@ -618,6 +627,8 @@ def cancel_side_effect():
assert bidi_rpc.pending_requests == 1
assert bidi_rpc._request_queue.get() is None
assert bidi_rpc._finalized
+ assert bidi_rpc._initial_request is None
+ assert not bidi_rpc._callbacks
def test_reopen_failure_on_rpc_restart(self):
error1 = ValueError("1")
@@ -772,6 +783,7 @@ def on_response(response):
consumer.stop()
assert consumer.is_active is False
+ assert consumer._on_response is None
def test_wake_on_error(self):
should_continue = threading.Event()
@@ -879,6 +891,60 @@ def close_side_effect():
consumer.stop()
assert consumer.is_active is False
+ assert consumer._on_response is None
# calling stop twice should not result in an error.
consumer.stop()
+
+ def test_stop_error_logs(self, caplog):
+ """
+ Closing the client should result in no internal error logs
+
+ https://github.com/googleapis/python-api-core/issues/788
+ """
+ caplog.set_level(logging.DEBUG)
+ bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ on_response = mock.Mock(spec=["__call__"])
+
+ consumer = bidi.BackgroundConsumer(bidi_rpc, on_response)
+
+ consumer.start()
+ consumer.stop()
+ # let the background thread run for a while before exiting
+ time.sleep(0.1)
+ bidi_rpc.is_active = False
+ # running thread should not result in error logs
+ error_logs = [r.message for r in caplog.records if r.levelname == "ERROR"]
+ assert not error_logs, f"Found unexpected ERROR logs: {error_logs}"
+ bidi_rpc.is_active = False
+
+ def test_fatal_exceptions_can_inform_consumer(self, caplog):
+ """
+ https://github.com/googleapis/python-api-core/issues/820
+ Exceptions thrown in the BackgroundConsumer not caught by `should_recover` / `should_terminate`
+ on the RPC should be bubbled back to the caller through `on_fatal_exception`, if passed.
+ """
+ caplog.set_level(logging.DEBUG)
+
+ for fatal_exception in (
+ ValueError("some non-api error"),
+ exceptions.PermissionDenied("some api error"),
+ ):
+ bidi_rpc = mock.create_autospec(bidi.ResumableBidiRpc, instance=True)
+ bidi_rpc.is_active = True
+ on_response = mock.Mock(spec=["__call__"])
+
+ on_fatal_exception = mock.Mock(spec=["__call__"])
+
+ bidi_rpc.open.side_effect = fatal_exception
+
+ consumer = bidi.BackgroundConsumer(
+ bidi_rpc, on_response, on_fatal_exception
+ )
+
+ consumer.start()
+ # let the background thread run for a while before exiting
+ time.sleep(0.1)
+
+ on_fatal_exception.assert_called_once_with(fatal_exception)
diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py
index 3361fef6..3eacabca 100644
--- a/tests/unit/test_client_info.py
+++ b/tests/unit/test_client_info.py
@@ -46,6 +46,7 @@ def test_constructor_options():
client_library_version="5",
user_agent="6",
rest_version="7",
+ protobuf_runtime_version="8",
)
assert info.python_version == "1"
@@ -55,11 +56,15 @@ def test_constructor_options():
assert info.client_library_version == "5"
assert info.user_agent == "6"
assert info.rest_version == "7"
+ assert info.protobuf_runtime_version == "8"
def test_to_user_agent_minimal():
info = client_info.ClientInfo(
- python_version="1", api_core_version="2", grpc_version=None
+ python_version="1",
+ api_core_version="2",
+ grpc_version=None,
+ protobuf_runtime_version=None,
)
user_agent = info.to_user_agent()
@@ -75,11 +80,12 @@ def test_to_user_agent_full():
gapic_version="4",
client_library_version="5",
user_agent="app-name/1.0",
+ protobuf_runtime_version="6",
)
user_agent = info.to_user_agent()
- assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5"
+ assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5 pb/6"
def test_to_user_agent_rest():
diff --git a/tests/unit/test_client_logging.py b/tests/unit/test_client_logging.py
new file mode 100644
index 00000000..b3b0b5c8
--- /dev/null
+++ b/tests/unit/test_client_logging.py
@@ -0,0 +1,140 @@
+import json
+import logging
+from unittest import mock
+
+from google.api_core.client_logging import (
+ setup_logging,
+ initialize_logging,
+ StructuredLogFormatter,
+)
+
+
+def reset_logger(scope):
+ logger = logging.getLogger(scope)
+ logger.handlers = []
+ logger.setLevel(logging.NOTSET)
+ logger.propagate = True
+
+
+def test_setup_logging_w_no_scopes():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging()
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_base_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("foogle")
+ base_logger = logging.getLogger("foogle")
+ assert isinstance(base_logger.handlers[0], logging.StreamHandler)
+ assert not base_logger.propagate
+ assert base_logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_configured_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ base_logger = logging.getLogger("foogle")
+ base_logger.propagate = False
+ setup_logging("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ reset_logger("foogle")
+
+
+def test_setup_logging_w_module_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("foogle.bar")
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ module_logger = logging.getLogger("foogle.bar")
+ assert isinstance(module_logger.handlers[0], logging.StreamHandler)
+ assert not module_logger.propagate
+ assert module_logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+ reset_logger("foogle.bar")
+
+
+def test_setup_logging_w_incorrect_scope():
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ setup_logging("abc")
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ # TODO(https://github.com/googleapis/python-api-core/issues/759): update test once we add logic to ignore an incorrect scope.
+ logger = logging.getLogger("abc")
+ assert isinstance(logger.handlers[0], logging.StreamHandler)
+ assert not logger.propagate
+ assert logger.level == logging.DEBUG
+
+ reset_logger("foogle")
+ reset_logger("abc")
+
+
+def test_initialize_logging():
+
+ with mock.patch("os.getenv", return_value="foogle.bar"):
+ with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"):
+ initialize_logging()
+
+ base_logger = logging.getLogger("foogle")
+ assert base_logger.handlers == []
+ assert not base_logger.propagate
+ assert base_logger.level == logging.NOTSET
+
+ module_logger = logging.getLogger("foogle.bar")
+ assert isinstance(module_logger.handlers[0], logging.StreamHandler)
+ assert not module_logger.propagate
+ assert module_logger.level == logging.DEBUG
+
+ # Check that `initialize_logging()` is a no-op after the first time by verifying that user-set configs are not modified:
+ base_logger.propagate = True
+ module_logger.propagate = True
+
+ initialize_logging()
+
+ assert base_logger.propagate
+ assert module_logger.propagate
+
+ reset_logger("foogle")
+ reset_logger("foogle.bar")
+
+
+def test_structured_log_formatter():
+ # TODO(https://github.com/googleapis/python-api-core/issues/761): Test additional fields when implemented.
+ record = logging.LogRecord(
+ name="Appelation",
+ level=logging.DEBUG,
+ msg="This is a test message.",
+ pathname="some/path",
+ lineno=25,
+ args=None,
+ exc_info=None,
+ )
+
+ # Extra fields:
+ record.rpcName = "bar"
+
+ formatted_msg = StructuredLogFormatter().format(record)
+ parsed_msg = json.loads(formatted_msg)
+
+ assert parsed_msg["name"] == "Appelation"
+ assert parsed_msg["severity"] == "DEBUG"
+ assert parsed_msg["message"] == "This is a test message."
+ assert parsed_msg["rpcName"] == "bar"
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
index 07a36817..e3f8f909 100644
--- a/tests/unit/test_exceptions.py
+++ b/tests/unit/test_exceptions.py
@@ -14,8 +14,8 @@
import http.client
import json
+from unittest import mock
-import mock
import pytest
import requests
diff --git a/tests/unit/test_extended_operation.py b/tests/unit/test_extended_operation.py
index 53af5204..ab550662 100644
--- a/tests/unit/test_extended_operation.py
+++ b/tests/unit/test_extended_operation.py
@@ -15,8 +15,8 @@
import dataclasses
import enum
import typing
+from unittest import mock
-import mock
import pytest
from google.api_core import exceptions
diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py
index 59442d43..8de9d8c0 100644
--- a/tests/unit/test_grpc_helpers.py
+++ b/tests/unit/test_grpc_helpers.py
@@ -12,7 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import mock
+from unittest import mock
+
import pytest
try:
diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py
index f029866c..80680720 100644
--- a/tests/unit/test_operation.py
+++ b/tests/unit/test_operation.py
@@ -13,7 +13,8 @@
# limitations under the License.
-import mock
+from unittest import mock
+
import pytest
try:
diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py
index cf43aedf..560722c5 100644
--- a/tests/unit/test_page_iterator.py
+++ b/tests/unit/test_page_iterator.py
@@ -14,8 +14,8 @@
import math
import types
+from unittest import mock
-import mock
import pytest
from google.api_core import page_iterator
diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py
index 808b36f3..c34dd0f3 100644
--- a/tests/unit/test_path_template.py
+++ b/tests/unit/test_path_template.py
@@ -13,8 +13,8 @@
# limitations under the License.
from __future__ import unicode_literals
+from unittest import mock
-import mock
import pytest
from google.api import auth_pb2
diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py
index 5b2c6dfd..5678d3bc 100644
--- a/tests/unit/test_protobuf_helpers.py
+++ b/tests/unit/test_protobuf_helpers.py
@@ -13,6 +13,7 @@
# limitations under the License.
import pytest
+import re
from google.api import http_pb2
from google.api_core import protobuf_helpers
@@ -65,7 +66,12 @@ def test_from_any_pb_failure():
in_message = any_pb2.Any()
in_message.Pack(date_pb2.Date(year=1990))
- with pytest.raises(TypeError):
+ with pytest.raises(
+ TypeError,
+ match=re.escape(
+ "Could not convert `google.type.Date` with underlying type `google.protobuf.any_pb2.Any` to `google.type.TimeOfDay`"
+ ),
+ ):
protobuf_helpers.from_any_pb(timeofday_pb2.TimeOfDay, in_message)
diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py
index 0bcf07f0..2c20202b 100644
--- a/tests/unit/test_timeout.py
+++ b/tests/unit/test_timeout.py
@@ -14,8 +14,7 @@
import datetime
import itertools
-
-import mock
+from unittest import mock
from google.api_core import timeout as timeouts
@@ -85,9 +84,9 @@ def _clock():
wrapped()
target.assert_called_with(timeout=3.0)
wrapped()
- target.assert_called_with(timeout=0.0)
+ target.assert_called_with(timeout=42.0)
wrapped()
- target.assert_called_with(timeout=0.0)
+ target.assert_called_with(timeout=42.0)
def test_apply_no_timeout(self):
target = mock.Mock(spec=["__call__", "__name__"], __name__="target")