diff --git a/.coveragerc b/.coveragerc index dd39c8546..1be6bc67f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,35 +1,14 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True [report] -fail_under = 100 show_missing = True +omit = + google/cloud/__init__.py + google/pubsub/__init__.py + google/pubsub/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file diff --git a/.flake8 b/.flake8 index ed9316381..90316de21 100644 --- a/.flake8 +++ b/.flake8 @@ -1,31 +1,33 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] -ignore = E203, E266, E501, W503 +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues +ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. + **/.nox/** __pycache__, .git, *.pyc, diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index cf01548a9..f1b33465e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. +# @googleapis/yoshi-python @googleapis/api-pubsub are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-pubsub -# The python-samples-owners team is the default owner for anything not -# explicitly taken by someone else. 
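The .coveragerc above now relies on per-file omit entries instead of a blanket fail_under = 100; the enforcement threshold moves into the unit-test workflow further down. A minimal sketch of a local check of the new config, assuming the usual tests/unit layout and that coverage and pytest are installed (nothing here is mandated by the patch):

    # Sketch: exercise the updated .coveragerc locally (tests/unit path assumed).
    python -m pip install coverage pytest
    python -m coverage run -m pytest tests/unit
    python -m coverage report --show-missing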
- - /samples/ @anguillanneuf @hongalex @googleapis/python-samples-owners +# @googleapis/python-samples-reviewers @googleapis/api-pubsub are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-pubsub diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 000000000..311ebbb85 --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml new file mode 100644 index 000000000..21786a4eb --- /dev/null +++ b/.github/auto-label.yaml @@ -0,0 +1,20 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +requestsize: + enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 000000000..ac5c87339 --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,17 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. +assign_issues: + - abbrowne126 + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - abbrowne126 + +assign_prs: + - abbrowne126 diff --git a/.github/flakybot.yaml b/.github/flakybot.yaml new file mode 100644 index 000000000..cb83375f9 --- /dev/null +++ b/.github/flakybot.yaml @@ -0,0 +1 @@ +issuePriority: p2 diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 000000000..6fe78aa79 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.github/release-please.yml b/.github/release-please.yml deleted file mode 100644 index 4507ad059..000000000 --- a/.github/release-please.yml +++ /dev/null @@ -1 +0,0 @@ -releaseType: python diff --git a/google/cloud/pubsub_v1/gapic/__init__.py b/.github/snippet-bot.yml similarity index 100% rename from google/cloud/pubsub_v1/gapic/__init__.py rename to .github/snippet-bot.yml diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml new file mode 100644 index 000000000..bfde18cc0 --- /dev/null +++ b/.github/sync-repo-settings.yaml @@ -0,0 +1,32 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection +branchProtectionRules: +# Identifies the protection rule pattern. 
Name of the branch to be protected. +# Defaults to `main` +- pattern: main + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true + requiredStatusCheckContexts: + - 'Kokoro' + - 'Kokoro - Against Pub/Sub Lite samples' + - 'cla/google' + - 'Samples - Lint' + - 'Samples - Python 3.7' + - 'Samples - Python 3.8' + - 'Samples - Python 3.9' + - 'Samples - Python 3.10' + - 'Samples - Python 3.11' + - 'Samples - Python 3.12' + - 'OwlBot Post Processor' + - 'docs' + - 'docfx' + - 'lint' + - 'unit (3.7)' + - 'unit (3.8)' + - 'unit (3.9)' + - 'unit (3.10)' + - 'unit (3.11)' + - 'unit (3.12)' + - 'unit (3.13)' + - 'unit (3.14)' + - 'cover' diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 000000000..0d0fdb861 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v5 + - name: Setup Python + uses: actions/setup-python@v6 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v5 + - name: Setup Python + uses: actions/setup-python@v6 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 000000000..6204983fd --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v5 + - name: Setup Python + uses: actions/setup-python@v6 + with: + python-version: "3.14" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 000000000..04e411304 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,61 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. 
+ # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + runs-on: ubuntu-22.04 + strategy: + matrix: + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v4 + with: + name: coverage-artifact-${{ matrix.python }} + path: .coverage-${{ matrix.python }} + include-hidden-files: true + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.14" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v4 + with: + path: .coverage-results/ + - name: Report coverage results + run: | + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* + coverage report --show-missing --fail-under=99 diff --git a/.gitignore b/.gitignore index b87e1ed58..d083ea1dd 100644 --- a/.gitignore +++ b/.gitignore @@ -46,15 +46,19 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ +venv/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 6a68ebd10..d41b45aa1 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,7 +15,13 @@ set -eo pipefail -cd github/python-pubsub +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") +fi + +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -24,16 +30,31 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version - -python3.6 -m nox +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi + +# If this is a continuous build, send the test log to the FlakyBot. 
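The cover job in unittest.yml above merges the per-interpreter data files before enforcing the 99% floor. A rough local equivalent, assuming the artifacts were downloaded into .coverage-results/ as in the workflow:

    # Sketch: combine per-Python coverage files the way the cover job does.
    python -m pip install coverage
    shopt -s globstar   # so ** matches nested directories
    coverage combine .coverage-results/**/.coverage*
    coverage report --show-missing --fail-under=99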
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+    cleanup() {
+        chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+        $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    }
+    trap cleanup EXIT HUP
+fi
+
+# If NOX_SESSION is set, run only the specified session;
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+    python3 -m nox -s ${NOX_SESSION:-}
+else
+    python3 -m nox
+fi
diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg
new file mode 100644
index 000000000..3595fb43f
--- /dev/null
+++ b/.kokoro/continuous/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+  key: "NOX_SESSION"
+  value: "prerelease_deps"
+}
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
deleted file mode 100644
index b602fa542..000000000
--- a/.kokoro/docs/common.cfg
+++ /dev/null
@@ -1,48 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-pubsub/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/python-pubsub/.kokoro/publish-docs.sh"
-}
-
-env_vars: {
-  key: "STAGING_BUCKET"
-  value: "docs-staging"
-}
-
-# Fetch the token needed for reporting release status to GitHub
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "yoshi-automation-github-key"
-    }
-  }
-}
-
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "docuploader_service_account"
-    }
-  }
-}
\ No newline at end of file
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
deleted file mode 100644
index 8f43917d9..000000000
--- a/.kokoro/docs/docs.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 000000000..c435402f4
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2024 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
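The NOX_SESSION dispatch added to build.sh is what the prerelease-deps.cfg files above (and the matching presubmit one below) hook into: Kokoro exports the variable and the build runs only that session. A hedged local equivalent:

    # Sketch: run only the prerelease_deps session, as the cfg files request.
    NOX_SESSION=prerelease_deps ./.kokoro/build.sh
    # ...or invoke the session directly:
    python3 -m nox -s prerelease_deps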
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+  msg "Retrieving secret ${key}"
+  docker run --entrypoint=gcloud \
+    --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+    gcr.io/google.com/cloudsdktool/cloud-sdk \
+    secrets versions access latest \
+    --project cloud-devrel-kokoro-resources \
+    --secret ${key} > \
+    "${SECRET_LOCATION}/${key}"
+  if [[ $? == 0 ]]; then
+    msg "Secret written to ${SECRET_LOCATION}/${key}"
+  else
+    msg "Error retrieving secret ${key}"
+  fi
+done
diff --git a/.kokoro/presubmit-against-pubsublite-samples.sh b/.kokoro/presubmit-against-pubsublite-samples.sh
new file mode 100755
index 000000000..639cbb8d3
--- /dev/null
+++ b/.kokoro/presubmit-against-pubsublite-samples.sh
@@ -0,0 +1,106 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.9 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+        --project="cloud-devrel-kokoro-resources"
+fi
+
+cd github/python-pubsub
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+
+# Clone googleapis/python-pubsublite
+git clone https://github.com/googleapis/python-pubsublite.git
+
+# Find all requirements.txt in the Pub/Sub Lite samples directory (may break on whitespace).
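The comment above warns that the globstar loop may break on whitespace in paths. A null-delimited variant would be robust; this is a sketch, not what the script ships:

    # Sketch: whitespace-safe discovery of requirements.txt files.
    while IFS= read -r -d '' req; do
        dir=$(dirname "$req")
        echo "would test: $dir"
    done < <(find python-pubsublite/samples -name requirements.txt -print0)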
+for file in python-pubsublite/samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use pytest to execute tests for py-3.8
+    python3.8 -m venv py-3.8
+    source py-3.8/bin/activate
+    # Install python-pubsublite samples test requirements.
+    python -m pip install --upgrade pip
+    python -m pip install -r requirements.txt -q
+    python -m pip install -r requirements-test.txt -q
+    # Install python-pubsub from source.
+    python -m pip install -e "$ROOT" -q
+    python -m pytest quickstart_test.py
+    EXIT=$?
+
+    deactivate
+    rm -rf py-3.8/
+
+    if [[ $EXIT -ne 0 ]]; then
+        RTN=1
+        echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+        echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg
new file mode 100644
index 000000000..3595fb43f
--- /dev/null
+++ b/.kokoro/presubmit/prerelease-deps.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Only run this nox session.
+env_vars: {
+  key: "NOX_SESSION"
+  value: "prerelease_deps"
+}
diff --git a/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg b/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg
new file mode 100644
index 000000000..0ad289456
--- /dev/null
+++ b/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh"
+}
\ No newline at end of file
diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg
index 8f43917d9..227ccdf47 100644
--- a/.kokoro/presubmit/presubmit.cfg
+++ b/.kokoro/presubmit/presubmit.cfg
@@ -1 +1,6 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "NOX_SESSION"
+  value: "system-3.12 blacken mypy format"
+}
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
deleted file mode 100755
index f462c727b..000000000
--- a/.kokoro/publish-docs.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1 - -cd github/python-pubsub - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version - -# build docs -nox -s docs - -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq - -# create metadata -python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index 321ef575c..000000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install gcp-releasetool -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") -cd github/python-pubsub -python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg deleted file mode 100644 index d9c893b59..000000000 --- a/.kokoro/release/common.cfg +++ /dev/null @@ -1,64 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/release.sh" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } -} - -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d9..000000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg index 7dcd14193..a0106ace3 100644 --- a/.kokoro/samples/lint/common.cfg +++ b/.kokoro/samples/lint/common.cfg @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.10/common.cfg similarity index 78% rename from .kokoro/samples/python3.6/common.cfg rename to .kokoro/samples/python3.10/common.cfg index 354ad19ef..363d8b0f7 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.10/common.cfg @@ -10,7 +10,13 @@ action { # Specify which tests to run env_vars: { key: "RUN_TESTS_SESSION" - value: "py-3.6" + value: "py-3.10" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-310" } env_vars: { @@ -31,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.10/continuous.cfg similarity index 100% rename from .kokoro/samples/python3.6/presubmit.cfg rename to .kokoro/samples/python3.10/continuous.cfg diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.10/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg similarity index 98% rename from .kokoro/samples/python3.6/periodic.cfg rename to .kokoro/samples/python3.10/periodic.cfg index 50fec9649..71cd1e597 100644 --- a/.kokoro/samples/python3.6/periodic.cfg +++ b/.kokoro/samples/python3.10/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 000000000..f337a0d54 --- /dev/null +++ b/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.11/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.11/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.11/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 000000000..ae6100772 --- /dev/null +++ b/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 000000000..96783769b --- /dev/null +++ b/.kokoro/samples/python3.13/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline_v2.sh" diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.14/common.cfg b/.kokoro/samples/python3.14/common.cfg new file mode 100644 index 000000000..f6feff705 --- /dev/null +++ b/.kokoro/samples/python3.14/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.14" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-314" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline_v2.sh" diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.14/continuous.cfg similarity index 99% rename from .kokoro/samples/python3.6/continuous.cfg rename to .kokoro/samples/python3.14/continuous.cfg index 7218af149..b19681787 100644 --- a/.kokoro/samples/python3.6/continuous.cfg +++ b/.kokoro/samples/python3.14/continuous.cfg @@ -4,4 +4,3 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" } - diff --git a/.kokoro/samples/python3.14/periodic-head.cfg b/.kokoro/samples/python3.14/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.14/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.14/periodic.cfg b/.kokoro/samples/python3.14/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.14/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.14/presubmit.cfg b/.kokoro/samples/python3.14/presubmit.cfg new file mode 100644 index 000000000..b19681787 --- /dev/null +++ b/.kokoro/samples/python3.14/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index becd0399d..9156c5975 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-pubsub/.kokoro/test-samples.sh" @@ -31,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg index 50fec9649..71cd1e597 100644 --- a/.kokoro/samples/python3.7/periodic.cfg +++ b/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index 685dfdc59..5922bef07 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-pubsub/.kokoro/test-samples.sh" @@ -31,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg index 50fec9649..71cd1e597 100644 --- a/.kokoro/samples/python3.8/periodic.cfg +++ b/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg new file mode 100644 index 000000000..a69739cce --- /dev/null +++ b/.kokoro/samples/python3.9/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.9" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py39" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.9/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg new file mode 100644 index 000000000..f9cfcd33e --- /dev/null +++ b/.kokoro/samples/python3.9/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg new file mode 100644 index 000000000..71cd1e597 --- /dev/null +++ b/.kokoro/samples/python3.9/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg new file mode 100644 index 000000000..a1c8d9759 --- /dev/null +++ b/.kokoro/samples/python3.9/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/tests/system/gapic/v1/test_system_publisher_v1.py b/.kokoro/test-samples-against-head.sh old mode 100644 new mode 100755 similarity index 55% rename from tests/system/gapic/v1/test_system_publisher_v1.py rename to .kokoro/test-samples-against-head.sh index 2ccebf07f..e9d8bd79a --- a/tests/system/gapic/v1/test_system_publisher_v1.py +++ b/.kokoro/test-samples-against-head.sh @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +#!/bin/bash +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,17 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os -import time - -from google.cloud import pubsub_v1 -from google.cloud.pubsub_v1.proto import pubsub_pb2 - +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. 
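Each common.cfg above pins RUN_TESTS_SESSION to a py-3.x session name, and test-samples-impl.sh (introduced just below) hands that value straight to nox inside every sample folder. A rough local equivalent for a single directory (the path is illustrative):

    # Sketch: what the runner effectively does per sample folder.
    cd samples/snippets            # hypothetical sample directory
    python3.9 -m nox -s py-3.10    # value taken from RUN_TESTS_SESSION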
-class TestSystemPublisher(object):
-    def test_list_topics(self):
-        project_id = os.environ["PROJECT_ID"]
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
-
-        client = pubsub_v1.PublisherClient()
-        project = client.project_path(project_id)
-        response = client.list_topics(project)
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 000000000..53e365bc4
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,103 @@
+#!/bin/bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+  echo "No tests run. './samples/**/requirements.txt' not found"
+  exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+        --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.9 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
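The set +e / RTN bookkeeping around that nox call is a fail-at-end pattern: every sample directory gets tested even when an earlier one fails, and the worst result becomes the exit code. Reduced to its skeleton (run_sample_tests is a stand-in, not a real helper):

    # Sketch: fail-at-end loop skeleton used by the sample runners.
    set +e
    RTN=0
    for dir in samples/*/; do
        (cd "$dir" && run_sample_tests)   # stand-in for the nox invocation
        [[ $? -ne 0 ]] && RTN=1
    done
    exit "$RTN"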
+
+    # If this is a periodic build, send the test log to the FlakyBot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+        chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+        $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+        RTN=1
+        echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+        echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 98851b56b..7933d8201 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2020 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,6 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release, and
+# run test-samples-impl.sh.
 
 # `-e` enables the script to automatically fail when a command fails
 # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
@@ -20,85 +24,21 @@ set -eo pipefail
 # Enables `**` to include files nested inside sub-folders
 shopt -s globstar
 
-cd github/python-pubsub
-
 # Run periodic samples tests at latest release
 if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    # preserving the test runner implementation.
+    cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    echo "Now we rewind the repo back to the latest release..."
     LATEST_RELEASE=$(git describe --abbrev=0 --tags)
     git checkout $LATEST_RELEASE
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
-    gcloud auth activate-service-account \
-        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
-        --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
-    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
-    cd "$ROOT"
-    # Navigate to the project folder.
-    file=$(dirname "$file")
-    cd "$file"
-
-    echo "------------------------------------------------------------"
-    echo "- testing $file"
-    echo "------------------------------------------------------------"
-
-    # Use nox to execute the tests for the project.
-    python3.6 -m nox -s "$RUN_TESTS_SESSION"
-    EXIT=$?
-
-    # If this is a periodic build, send the test log to the Build Cop Bot.
-    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
-    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
-        chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
-        $KOKORO_GFILE_DIR/linux_amd64/buildcop
+    echo "The current head is: "
+    echo $(git rev-parse --verify HEAD)
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    # move the test runner implementation back if the file is missing.
+    if [ ! -f .kokoro/test-samples-impl.sh ]; then
+        cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
     fi
+fi
-
-    if [[ $EXIT -ne 0 ]]; then
-        RTN=1
-        echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
-    else
-        echo -e "\n Testing completed.\n"
-    fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
\ No newline at end of file
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251f3..48f796997 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2017 Google Inc.
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,9 +15,14 @@
 
 set -eo pipefail
 
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+    chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+    ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+    echo "cleanup";
+}
+trap cleanup EXIT
 
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 000000000..35fa52923
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download a few files from GCS to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. +function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. 
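The `log_impl` helper above combines a banner, an RFC-3339 UTC timestamp, and TTY-aware ANSI color. A rough Python equivalent of that logging style, for orientation only:

```python
# TTY-aware, timestamped logging in the style of log_impl above.
import datetime
import sys

COLORS = {"red": "\033[31m", "green": "\033[32m", "yellow": "\033[33m"}
RESET = "\033[0m"


def log(message: str, color: str = "") -> None:
    # Disable color when stdout is not a terminal, as the bash tput check does.
    code = COLORS.get(color, "") if sys.stdout.isatty() else ""
    stamp = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    print("=" * 64)
    print(f"{code}{stamp}: {message}{RESET if code else ''}")
    print("=" * 64)


log("Building with Trampoline 2.0.5", "yellow")
```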
+ RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. + "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For FlakyBot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." 
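Two small pieces of the logic above are worth restating compactly: CI detection keys off one well-known environment variable per system, and `repo_root` simply walks upward until it finds a `.git` directory. A sketch that, like the bash version, assumes it runs somewhere inside a git checkout:

```python
# CI detection and repo-root discovery, mirroring the bash above.
import os
import pathlib
from typing import Optional


def detect_ci() -> Optional[str]:
    if os.environ.get("KOKORO_BUILD_ID"):
        return "kokoro"
    if os.environ.get("TRAVIS") == "true":
        return "travis"
    if os.environ.get("GITHUB_RUN_ID"):
        return "github-workflow"
    if os.environ.get("CIRCLECI") == "true":
        return "circleci"
    return None


def repo_root(start: pathlib.Path) -> pathlib.Path:
    # Assumes a .git ancestor exists, exactly as the bash loop does.
    current = start.resolve()
    while not (current / ".git").is_dir():
        current = current.parent
    return current
```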
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." 
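The configuration handling above boils down to two moves: abort early when a required variable is unset, and strip the legacy v1-style `github/<repo>/` prefix from `TRAMPOLINE_BUILD_FILE` (the `${TRAMPOLINE_BUILD_FILE#github/*/}` expansion). In Python terms, with an illustrative path in the comment:

```python
# Required-variable check plus legacy prefix stripping, as done above.
import os
import re
import sys

for name in ("TRAMPOLINE_IMAGE", "TRAMPOLINE_BUILD_FILE"):
    if not os.environ.get(name):
        sys.exit(f"Missing {name} env var. Aborting.")

# e.g. "github/python-pubsub/.kokoro/build.sh" -> ".kokoro/build.sh"
build_file = re.sub(r"^github/[^/]+/", "", os.environ["TRAMPOLINE_BUILD_FILE"])
print(f"Using TRAMPOLINE_BUILD_FILE: {build_file}")
```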
+            exit 1
+        fi
+    fi
+else
+    if [[ "${has_image}" != "true" ]]; then
+        log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+        exit 1
+    fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+    "--rm"
+
+    # Use the host network.
+    "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+    "--privileged"
+
+    # Run the docker script with the user id. Because the docker image gets to
+    # write in ${PWD} you typically want this to be your user id.
+    # To allow docker in docker, we need to use docker gid on the host.
+    "--user" "${user_uid}:${docker_gid}"
+
+    # Pass down the USER.
+    "--env" "USER=${user_name}"
+
+    # Mount the project directory inside the Docker container.
+    "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+    "--workdir" "${TRAMPOLINE_WORKSPACE}"
+    "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+    # Mount the temporary home directory.
+    "--volume" "${tmphome}:/h"
+    "--env" "HOME=/h"
+
+    # Allow docker in docker.
+    "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount the /tmp so that docker in docker can mount the files
+    # there correctly.
+    "--volume" "/tmp:/tmp"
+    # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+    # TODO(tmatsuo): This part is not portable.
+    "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+    "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+    "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+    "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+    "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+    docker_flags+=("-it")
+fi
+
+# Pass down env vars.
+for e in "${pass_down_envvars[@]}"
+do
+    if [[ -n "${!e:-}" ]]; then
+        docker_flags+=("--env" "${e}=${!e}")
+    fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+    log_yellow "Running the given commands '" "${@:1}" "' in the container."
+    readonly commands=("${@:1}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+    log_yellow "Running the tests in a Docker container."
+    docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+    log_green "Build finished with ${test_retval}"
+else
+    log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+       [[ $test_retval == 0 ]] && \
+       [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+    log_yellow "Uploading the Docker image."
+    if docker push "${TRAMPOLINE_IMAGE}"; then
+        log_green "Finished uploading the Docker image."
+    else
+        log_red "Failed uploading the Docker image."
+    fi
+    # Call trampoline_after_upload_hook if it's defined.
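The invocation assembled above follows one simple rule for environment variables: only forward the ones that are actually set. Reduced to its skeleton in Python; the image name and entrypoint here are placeholders, not values from the script:

```python
# Skeleton of the docker invocation above: static flags, conditional
# --env pairs, and the container's exit status propagated as our own.
import os
import subprocess
import sys
from typing import List


def run_in_container(image: str, entrypoint: str, pass_down: List[str]) -> int:
    flags = ["--rm", "--network=host", f"--entrypoint={entrypoint}"]
    for name in pass_down:
        value = os.environ.get(name)
        if value:  # forward only variables that are set and non-empty
            flags += ["--env", f"{name}={value}"]
    return subprocess.run(["docker", "run", *flags, image]).returncode


sys.exit(run_in_container("gcr.io/example/test-image", ".kokoro/build.sh",
                          ["RUNNING_IN_CI", "TRAMPOLINE_CI"]))
```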
+ if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.librarian/config.yaml b/.librarian/config.yaml new file mode 100644 index 000000000..111f94dd5 --- /dev/null +++ b/.librarian/config.yaml @@ -0,0 +1,6 @@ +global_files_allowlist: + # Allow the container to read and write the root `CHANGELOG.md` + # file during the `release` step to update the latest client library + # versions which are hardcoded in the file. + - path: "CHANGELOG.md" + permissions: "read-write" diff --git a/.librarian/generator-input/.repo-metadata.json b/.librarian/generator-input/.repo-metadata.json new file mode 100644 index 000000000..8d12e4cc0 --- /dev/null +++ b/.librarian/generator-input/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "pubsub", + "name_pretty": "Google Cloud Pub/Sub", + "product_documentation": "https://cloud.google.com/pubsub/docs/", + "client_documentation": "https://cloud.google.com/python/docs/reference/pubsub/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", + "release_level": "stable", + "language": "python", + "repo": "googleapis/python-pubsub", + "distribution_name": "google-cloud-pubsub", + "api_id": "pubsub.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "codeowner_team": "@googleapis/api-pubsub", + "api_shortname": "pubsub", + "library_type": "GAPIC_COMBO", + "api_description": "is designed to provide reliable, many-to-many, asynchronous messaging between applications. Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications." +} diff --git a/.librarian/generator-input/librarian.py b/.librarian/generator-input/librarian.py new file mode 100644 index 000000000..5e6af7955 --- /dev/null +++ b/.librarian/generator-input/librarian.py @@ -0,0 +1,352 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from pathlib import Path +import re +import shutil +import textwrap + +import synthtool as s +import synthtool.gcp as gcp +from synthtool.languages import python + +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +clean_up_generated_samples = True + +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) + +for library in s.get_staging_dirs(default_version): + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + + # DEFAULT SCOPES and SERVICE_ADDRESS are being used. so let's force them in. 
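Nearly every patch in this script follows the same defensive pattern, starting with the one below: apply a regex rewrite, then check the reported substitution count so that a generator change which silently stops matching fails the build instead of shipping an unpatched client. The pattern in miniature, with plain `re.subn` standing in for `s.replace`:

```python
# The verify-the-count pattern used by the patches in this script.
import re

source = 'DEFAULT_ENDPOINT = "pubsub.googleapis.com"'
patched, count = re.subn(
    r'DEFAULT_ENDPOINT = "pubsub\.googleapis\.com"',
    'SERVICE_ADDRESS = "pubsub.googleapis.com:443"\n\\g<0>',
    source,
)
if count < 1:
    raise Exception("Expected replacement not made.")
print(patched)
```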
+ s.replace( + library / f"google/pubsub_{library.name}/services/*er/*client.py", + r"""DEFAULT_ENDPOINT = \"pubsub\.googleapis\.com\"""", + """ + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', + ) + + SERVICE_ADDRESS = "pubsub.googleapis.com:443" + \"""The default address of the service.\""" + + \g<0>""", + ) + + # Modify GRPC options in transports. + count = s.replace( + [ + library / f"google/pubsub_{library.name}/services/*/transports/grpc*", + library / f"tests/unit/gapic/pubsub_{library.name}/*", + ], + "options=\[.*?\]", + """options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ]""", + flags=re.MULTILINE | re.DOTALL, + ) + + if count < 15: + raise Exception("Expected replacements for gRPC channel options not made.") + + # If the emulator is used, force an insecure gRPC channel to avoid SSL errors. + clients_to_patch = [ + library / f"google/pubsub_{library.name}/services/publisher/client.py", + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + library / f"google/pubsub_{library.name}/services/schema_service/client.py", + ] + err_msg = ( + "Expected replacements for gRPC channel to use with the emulator not made." + ) + + count = s.replace(clients_to_patch, r"import os", "import functools\n\g<0>") + + if count < len(clients_to_patch): + raise Exception(err_msg) + + count = s.replace( + clients_to_patch, + f"from \.transports\.base", + "\nimport grpc\n\g<0>", + ) + + if count < len(clients_to_patch): + raise Exception(err_msg) + + # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator + # code below to test files. + count = s.replace( + clients_to_patch, + r"# initialize with the provided callable or the passed in class", + """\g<0> + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + transport_init = functools.partial(transport_init, channel=channel) + + """, + ) + + if count < len(clients_to_patch): + raise Exception(err_msg) + + # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream + # results. + s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + ( + r"# Wrap the RPC method.*\n" + r"\s+# and friendly error.*\n" + r"\s+rpc = self\._transport\._wrapped_methods\[self\._transport\.streaming_pull\]" + ), + """ + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False + + \g<0>""", + ) + + # Emit deprecation warning if return_immediately flag is set with synchronous pull. + s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/*client.py", + r"from google.pubsub_v1 import gapic_version as package_version", + "import warnings\n\g<0>", + ) + + count = s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/*client.py", + r""" + ([^\n\S]+(?:async\ )?def\ pull\(.*?->\ pubsub\.PullResponse:.*?) 
+ ((?P[^\n\S]+)\#\ Wrap\ the\ RPC\ method) + """, + textwrap.dedent( + """ + \g<1> + \gif request.return_immediately: + \g warnings.warn( + \g "The return_immediately flag is deprecated and should be set to False.", + \g category=DeprecationWarning, + \g ) + + \g<2>""" + ), + flags=re.MULTILINE | re.DOTALL | re.VERBOSE, + ) + + if count != 2: + raise Exception("Too many or too few replacements in pull() methods.") + + # Silence deprecation warnings in pull() method flattened parameter tests. + s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + "import os", + "\g<0>\nimport warnings", + ) + + count = s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) + \s+(client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags=re.MULTILINE | re.DOTALL, + ) + + if count < 1: + raise Exception("Catch warnings replacement failed.") + + count = s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) + \s+response = (await client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags=re.MULTILINE | re.DOTALL, + ) + + if count < 1: + raise Exception("Catch warnings replacement failed.") + + # Make sure that client library version is present in user agent header. + count = s.replace( + [ + library + / f"google/pubsub_{library.name}/services/publisher/async_client.py", + library / f"google/pubsub_{library.name}/services/publisher/client.py", + library + / f"google/pubsub_{library.name}/services/publisher/transports/base.py", + library + / f"google/pubsub_{library.name}/services/schema_service/async_client.py", + library / f"google/pubsub_{library.name}/services/schema_service/client.py", + library + / f"google/pubsub_{library.name}/services/schema_service/transports/base.py", + library + / f"google/pubsub_{library.name}/services/subscriber/async_client.py", + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + library + / f"google/pubsub_{library.name}/services/subscriber/transports/base.py", + ], + r"""gapic_version=package_version.__version__""", + "client_library_version=package_version.__version__", + ) + + if count < 1: + raise Exception("client_library_version replacement failed.") + + # Allow timeout to be an instance of google.api_core.timeout.* + count = s.replace( + library / f"google/pubsub_{library.name}/types/__init__.py", + r"from \.pubsub import \(", + "from typing import Union\n\n\g<0>", + ) + + if count < 1: + raise Exception("Catch timeout replacement 1 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/types/__init__.py", + r"__all__ = \(\n", + textwrap.dedent( + '''\ + TimeoutType = Union[ + int, + float, + "google.api_core.timeout.ConstantTimeout", + "google.api_core.timeout.ExponentialTimeout", + ] + """The type of the timeout parameter of publisher client methods.""" + + \g<0> "TimeoutType",''' + ), + ) + + if count < 1: + raise Exception("Catch timeout replacement 2 failed.") + + count = s.replace( + library / 
f"google/pubsub_{library.name}/services/publisher/*client.py", + r"from google.api_core import retry as retries.*\n", + "\g<0>from google.api_core import timeout as timeouts # type: ignore\n", + ) + + if count < 1: + raise Exception("Catch timeout replacement 3 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + f"from google\.pubsub_{library.name}\.types import pubsub", + f"\g<0>\nfrom google.pubsub_{library.name}.types import TimeoutType", + ) + + if count < 1: + raise Exception("Catch timeout replacement 4 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"(\s+)timeout: Union\[float, object\] = gapic_v1.method.DEFAULT.*\n", + f"\g<1>timeout: TimeoutType = gapic_{library.name}.method.DEFAULT,", + ) + + if count < 1: + raise Exception("Catch timeout replacement 5 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"([^\S\r\n]+)timeout \(float\): (.*)\n", + ("\g<1>timeout (TimeoutType):\n" "\g<1> \g<2>\n"), + ) + + if count < 1: + raise Exception("Catch timeout replacement 6 failed.") + + # Override the default max retry deadline for publisher methods. + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/transports/base.py", + r"deadline=60\.0", + "deadline=600.0", + ) + if count < 9: + raise Exception( + "Default retry deadline not overriden for all publisher methods." + ) + + # The namespace package declaration in google/cloud/__init__.py should be excluded + # from coverage. + count = s.replace( + library / ".coveragerc", + "google/pubsub/__init__.py", + """google/cloud/__init__.py + google/pubsub/__init__.py""", + ) + + if count < 1: + raise Exception(".coveragerc replacement failed.") + + s.move([library], excludes=["noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +templated_files = gcp.CommonTemplates().py_library( + microgenerator=True, + samples=True, + cov_level=99, + versions=gcp.common.detect_versions(path="./google", default_first=True), + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], + unit_test_dependencies=["flaky"], + system_test_python_versions=["3.12"], + system_test_external_dependencies=["psutil","flaky"], +) +s.move(templated_files, excludes=[".coveragerc", ".github/**", "README.rst", "docs/**", ".kokoro/**"]) + +python.py_samples(skip_readmes=True) + +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) diff --git a/.librarian/generator-input/noxfile.py b/.librarian/generator-input/noxfile.py new file mode 100644 index 000000000..fd552166c --- /dev/null +++ b/.librarian/generator-input/noxfile.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +FLAKE8_VERSION = "flake8==6.1.0" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" +LINT_PATHS = ["google", "tests", "noxfile.py", "setup.py"] + +MYPY_VERSION = "mypy==1.10.0" + +DEFAULT_PYTHON_VERSION = "3.14" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [ + "flaky", +] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "psutil", + "flaky", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "mypy", + # https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936 + # "mypy_samples", # TODO: uncomment when the check passes + "docs", + "docfx", + "format", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Run type checks with mypy.""" + session.install("-e", ".[all]") + session.install(MYPY_VERSION) + + # Version 2.1.1 of google-api-core version is the first type-checked release. + # Version 2.2.0 of google-cloud-core version is the first type-checked release. + session.install( + "google-api-core[grpc]>=2.1.1", "google-cloud-core>=2.2.0", "types-requests" + ) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 + session.install("types-protobuf!=4.24.0.20240106", "types-setuptools") + + # TODO: Only check the hand-written layer, the generated code does not pass + # mypy checks yet. 
+ # https://github.com/googleapis/gapic-generator-python/issues/1092 + # TODO: Re-enable mypy checks once we merge, since incremental checks are failing due to protobuf upgrade + # session.run("mypy", "-p", "google.cloud", "--exclude", "google/pubsub_v1/") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy_samples(session): + """Run type checks with mypy.""" + + session.install("-e", ".[all]") + + session.install("pytest") + session.install(MYPY_VERSION) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + session.install( + "types-mock", "types-protobuf", "types-setuptools", "types-requests" + ) + + session.run( + "mypy", + "--config-file", + str(CURRENT_DIRECTORY / "samples" / "snippets" / "mypy.ini"), + "--no-incremental", # Required by warn-unused-configs from mypy.ini to work + "samples/", + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. 
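+    # The protobuf_implementation parameter maps onto the
+    # PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION env var passed to pytest
+    # below, which selects the protobuf runtime backend at import time.
+    # The legacy "cpp" backend requires protobuf<4 and is skipped on
+    # Python 3.11+, where it is no longer supported.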
+ + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. + if system_test_exists: + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. 
+ + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=99") + + session.run("coverage", "erase") + + +# py > 3.10 not supported yet +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +# py > 3.10 not supported yet +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.14") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
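+    # The lookahead regex below keeps only the names of pinned
+    # "pkg==version" lines, so the very same packages can be
+    # reinstalled at prerelease versions further down.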
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/.librarian/generator-input/setup.py b/.librarian/generator-input/setup.py new file mode 100644 index 000000000..dd2809f82 --- /dev/null +++ b/.librarian/generator-input/setup.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-pubsub" + + +description = "Google Cloud Pub/Sub API client library" + +version = {} +with open(os.path.join(package_root, "google/pubsub/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "grpcio >= 1.51.3, < 2.0.0; python_version < '3.14'", # https://github.com/googleapis/python-pubsub/issues/609 + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + # google-api-core >= 1.34.0 is allowed in order to support google-api-core 1.x + "google-auth >= 2.14.1, <3.0.0", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, < 1.0.0", + "grpcio-status >= 1.33.2", + "opentelemetry-api <= 1.22.0; python_version<='3.7'", + "opentelemetry-api >= 1.27.0; python_version>='3.8'", + "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", + "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", +] +extras = {"libcst": "libcst >= 0.3.10"} +url = "https://github.com/googleapis/python-pubsub" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + install_requires=dependencies, + extras_require=extras, + python_requires=">=3.7", + include_package_data=True, + zip_safe=False, +) diff --git a/.librarian/state.yaml b/.librarian/state.yaml new file mode 100644 index 000000000..286fb52ef --- /dev/null +++ b/.librarian/state.yaml @@ -0,0 +1,37 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209 +libraries: + - id: google-cloud-pubsub + version: 2.34.0 + last_generated_commit: 9fcfbea0aa5b50fa22e190faceb073d74504172b + apis: + - path: google/pubsub/v1 + service_config: pubsub_v1.yaml + source_roots: + - . 
+ preserve_regex: [] + remove_regex: + - ^google/pubsub + - ^google/pubsub_v1 + - ^tests/unit/gapic + - ^tests/__init__.py + - ^tests/unit/__init__.py + - ^.coveragerc + - ^.flake8 + - ^.pre-commit-config.yaml + - ^.repo-metadata.json + - ^.trampolinerc + - ^LICENSE + - ^MANIFEST.in + - ^SECURITY.md + - ^mypy.ini + - ^noxfile.py + - ^owlbot.py + - ^renovate.json + - ^samples/AUTHORING_GUIDE.md + - ^samples/CONTRIBUTING.md + - ^samples/generated_samples + - ^scripts/fixup_pubsub_v1_keywords.py + - ^setup.py + - ^testing/constraints-3.9 + - ^testing/constraints-3.1 + tag_format: v{version} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..1d74695f7 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,31 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black +- repo: https://github.com/pycqa/flake8 + rev: 6.1.0 + hooks: + - id: flake8 diff --git a/.repo-metadata.json b/.repo-metadata.json index b21c198b1..8d12e4cc0 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -1,13 +1,18 @@ { - "name": "pubsub", - "name_pretty": "Google Cloud Pub/Sub", - "product_documentation": "https://cloud.google.com/pubsub/docs/", - "client_documentation": "https://googleapis.dev/python/pubsub/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", - "release_level": "ga", - "language": "python", - "repo": "googleapis/python-pubsub", - "distribution_name": "google-cloud-pubsub", - "api_id": "pubsub.googleapis.com", - "requires_billing": true + "name": "pubsub", + "name_pretty": "Google Cloud Pub/Sub", + "product_documentation": "https://cloud.google.com/pubsub/docs/", + "client_documentation": "https://cloud.google.com/python/docs/reference/pubsub/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", + "release_level": "stable", + "language": "python", + "repo": "googleapis/python-pubsub", + "distribution_name": "google-cloud-pubsub", + "api_id": "pubsub.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "codeowner_team": "@googleapis/api-pubsub", + "api_shortname": "pubsub", + "library_type": "GAPIC_COMBO", + "api_description": "is designed to provide reliable, many-to-many, asynchronous messaging between applications. Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications." 
} diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 000000000..008015237 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,61 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Add required env vars here. +required_envvars+=( +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "NOX_SESSION" + ############### + # Docs builds + ############### + "STAGING_BUCKET" + "V2_STAGING_BUCKET" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. + "RUN_TESTS_SESSION" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/CHANGELOG.md b/CHANGELOG.md index 939bfd5fc..bcf798e60 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,988 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history -### [1.6.1](https://www.github.com/googleapis/python-pubsub/compare/v1.6.0...v1.6.1) (2020-06-30) +## [2.34.0](https://github.com/googleapis/python-pubsub/compare/v2.33.0...v2.34.0) (2025-12-16) + + +### Features + +* support mTLS certificates when available (#1566) ([24761a2fedeb17f5af98a72a62306ad59306a553](https://github.com/googleapis/python-pubsub/commit/24761a2fedeb17f5af98a72a62306ad59306a553)) + + +## [2.33.0](https://github.com/googleapis/python-pubsub/compare/v2.32.0...v2.33.0) (2025-10-30) + + +### Features + +* Add AwsKinesisFailureReason.ApiViolationReason ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) +* Add tags to Subscription, Topic, and CreateSnapshotRequest messages for use in CreateSubscription, CreateTopic, and CreateSnapshot requests respectively ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) +* Annotate some resource fields with their corresponding API types ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) + + +### Bug Fixes + +* Deprecate credentials_file argument ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) + + +### Documentation + +* A comment for field `received_messages` in message `.google.pubsub.v1.StreamingPullResponse` is changed ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) + +## 
[2.32.0](https://github.com/googleapis/python-pubsub/compare/v2.31.1...v2.32.0) (2025-10-28) + + +### Features + +* Adds Python 3.14 support ([#1512](https://github.com/googleapis/python-pubsub/issues/1512)) ([95a2690](https://github.com/googleapis/python-pubsub/commit/95a26907efecfa5d56b140b7f833640b7fbb21d7)) +* Debug logs ([#1460](https://github.com/googleapis/python-pubsub/issues/1460)) ([b5d4a45](https://github.com/googleapis/python-pubsub/commit/b5d4a458ca9319bebbe3142a1f05d4d4471c8d4d)) +* Support the protocol version in StreamingPullRequest ([#1455](https://github.com/googleapis/python-pubsub/issues/1455)) ([e6294a1](https://github.com/googleapis/python-pubsub/commit/e6294a1883abf9809cb56d5cd4ad25cc501bc994)) + + +### Bug Fixes + +* Ignore future warnings on python versions ([#1546](https://github.com/googleapis/python-pubsub/issues/1546)) ([8e28dea](https://github.com/googleapis/python-pubsub/commit/8e28dea5b68fc940266d0b1a9f2a07a7b5f10b34)) + +## [2.31.1](https://github.com/googleapis/python-pubsub/compare/v2.31.0...v2.31.1) (2025-07-28) + + +### Bug Fixes + +* Change Log Severities for Terminated Streams ([#1433](https://github.com/googleapis/python-pubsub/issues/1433)) ([3a3aa79](https://github.com/googleapis/python-pubsub/commit/3a3aa79040d656a3391a153386ec662d002f9368)) +* Propagate Otel Context to Subscriber Callback if Provided ([#1429](https://github.com/googleapis/python-pubsub/issues/1429)) ([b0f6f49](https://github.com/googleapis/python-pubsub/commit/b0f6f49f65752e88523f9c4209366d2a18140416)) + +## [2.31.0](https://github.com/googleapis/python-pubsub/compare/v2.30.0...v2.31.0) (2025-06-26) + + +### Features + +* Add MessageTransformationFailureReason to IngestionFailureEvent ([#1427](https://github.com/googleapis/python-pubsub/issues/1427)) ([8ab13e1](https://github.com/googleapis/python-pubsub/commit/8ab13e1b71c151f0146548e7224dd38c9d719a88)) + + +### Bug Fixes + +* Surface Fatal Stream Errors to Future; Adjust Retryable Error Codes ([#1422](https://github.com/googleapis/python-pubsub/issues/1422)) ([e081beb](https://github.com/googleapis/python-pubsub/commit/e081beb29056035304d365ec9c50fa7ffbac6886)) + +## [2.30.0](https://github.com/googleapis/python-pubsub/compare/v2.29.1...v2.30.0) (2025-06-07) + + +### Features + +* Add SchemaViolationReason to IngestionFailureEvent ([#1411](https://github.com/googleapis/python-pubsub/issues/1411)) ([c046ca2](https://github.com/googleapis/python-pubsub/commit/c046ca22e9bddff6b50f7670bf6b9b9470bf78e8)) + +## [2.29.1](https://github.com/googleapis/python-pubsub/compare/v2.29.0...v2.29.1) (2025-05-23) + + +### Bug Fixes + +* Remove setup.cfg configuration for creating universal wheels ([#1376](https://github.com/googleapis/python-pubsub/issues/1376)) ([60639c4](https://github.com/googleapis/python-pubsub/commit/60639c4928105ae8a72c8e37b1f48f75cc2ffcc3)) + + +### Documentation + +* **sample:** Add samples for topic and subscription SMTs ([#1386](https://github.com/googleapis/python-pubsub/issues/1386)) ([4d072e0](https://github.com/googleapis/python-pubsub/commit/4d072e088b59f692dc3d59c3197a2993c125917e)) +* Update documentation for JavaScriptUDF to indicate that the `message_id` metadata field is optional instead of required ([#1380](https://github.com/googleapis/python-pubsub/issues/1380)) ([be90054](https://github.com/googleapis/python-pubsub/commit/be9005412fea06bea917c8b6861546b7e6c62a1e)) +* Update readme links ([#1409](https://github.com/googleapis/python-pubsub/issues/1409)) 
([77ba05d](https://github.com/googleapis/python-pubsub/commit/77ba05d4ba5b84a25c1a07c5397bbc184fa6041d)) + +## [2.29.0](https://github.com/googleapis/python-pubsub/compare/v2.28.0...v2.29.0) (2025-03-19) + + +### Features + +* Add REST Interceptors which support reading metadata ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* Add support for opt-in debug logging ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* Deprecate `enabled` field for message transforms and add `disabled` field ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) + + +### Bug Fixes + +* Allow logs to propagate upstream for caplog testing ([#1374](https://github.com/googleapis/python-pubsub/issues/1374)) ([fa39b0e](https://github.com/googleapis/python-pubsub/commit/fa39b0e87695da40036c1daec1b3108374672d61)) +* Allow Protobuf 6.x ([#1369](https://github.com/googleapis/python-pubsub/issues/1369)) ([c95b7a5](https://github.com/googleapis/python-pubsub/commit/c95b7a5bad7138a70e56c278970f5b54939a68f8)) +* Fix typing issue with gRPC metadata when key ends in -bin ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) + + +### Documentation + +* A comment for field `code` in message `.google.pubsub.v1.JavaScriptUDF` is changed ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* Add samples and test for ingestion from Kafka sources ([#1354](https://github.com/googleapis/python-pubsub/issues/1354)) ([820f986](https://github.com/googleapis/python-pubsub/commit/820f986104ca39fd0c92ba6816319e939be1ed63)) +* Deprecate `enabled` field for message transforms and add `disabled` field ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* **samples:** Increase example max_bytes setting for cloud storage subscriptions to encourage more performant subscribe ([#1324](https://github.com/googleapis/python-pubsub/issues/1324)) ([cb760a7](https://github.com/googleapis/python-pubsub/commit/cb760a71cd4ad035d0c2c4c0f7b66bf52f18808c)) + +## [2.28.0](https://github.com/googleapis/python-pubsub/compare/v2.27.3...v2.28.0) (2025-01-30) + + +### Features + +* Add support for message transforms to Topic and Subscription ([#1274](https://github.com/googleapis/python-pubsub/issues/1274)) ([e5e2f3f](https://github.com/googleapis/python-pubsub/commit/e5e2f3f732f451d14dfb4c37ae979e5c04045305)) + + +### Bug Fixes + +* Get channel target for a gRPC request ([#1339](https://github.com/googleapis/python-pubsub/issues/1339)) ([16ea766](https://github.com/googleapis/python-pubsub/commit/16ea76611d121700a3f3119d18919063d12c81c1)) +* Set creds only if transport not provided ([#1348](https://github.com/googleapis/python-pubsub/issues/1348)) ([59965a4](https://github.com/googleapis/python-pubsub/commit/59965a4804a434467a47815cdbdd5ce31bbb3662)) + +## [2.27.3](https://github.com/googleapis/python-pubsub/compare/v2.27.2...v2.27.3) (2025-01-24) + + +### Bug Fixes + +* Stop using api_core default timeouts in publish since they are broken ([#1326](https://github.com/googleapis/python-pubsub/issues/1326)) ([ba2c2ee](https://github.com/googleapis/python-pubsub/commit/ba2c2eef7da89a3c14c14d9b6191cd8738c30341)) + +## [2.27.2](https://github.com/googleapis/python-pubsub/compare/v2.27.1...v2.27.2) (2025-01-06) + + +### Bug Fixes + +* Handle 
TransportError Exceptions thrown from gapic_publish ([#1318](https://github.com/googleapis/python-pubsub/issues/1318)) ([0e058c7](https://github.com/googleapis/python-pubsub/commit/0e058c73487384100847adcb2f4ab95a61c072c4)) + +## [2.27.1](https://github.com/googleapis/python-pubsub/compare/v2.27.0...v2.27.1) (2024-11-08) + + +### Bug Fixes + +* Add support for Python 3.13 ([#1302](https://github.com/googleapis/python-pubsub/issues/1302)) ([ab22e27](https://github.com/googleapis/python-pubsub/commit/ab22e27954450b4e06ec98fe2e3458056aa8ca60)) + +## [2.27.0](https://github.com/googleapis/python-pubsub/compare/v2.26.1...v2.27.0) (2024-11-02) + + +### Features + +* Add support for Python 3.13 ([#1281](https://github.com/googleapis/python-pubsub/issues/1281)) ([0b46a33](https://github.com/googleapis/python-pubsub/commit/0b46a3321d6f19cd72e4f2ccdba73d062c7bd832)) + + +### Bug Fixes + +* Mark test_streaming_pull_max_messages flaky ([#1288](https://github.com/googleapis/python-pubsub/issues/1288)) ([d6635a0](https://github.com/googleapis/python-pubsub/commit/d6635a00dc2c614dd8608ef32ad4e79f9124e040)) + +## [2.26.1](https://github.com/googleapis/python-pubsub/compare/v2.26.0...v2.26.1) (2024-10-10) + + +### Documentation + +* Add ingestion from GCS sample ([#1273](https://github.com/googleapis/python-pubsub/issues/1273)) ([b59cc8d](https://github.com/googleapis/python-pubsub/commit/b59cc8d4fae593eb7592455a1696d7ab996a53dd)) + +## [2.26.0](https://github.com/googleapis/python-pubsub/compare/v2.25.2...v2.26.0) (2024-10-09) + + +### Features + +* Add ingestion Cloud Storage fields and Platform Logging fields to Topic ([#1248](https://github.com/googleapis/python-pubsub/issues/1248)) ([a7a4caa](https://github.com/googleapis/python-pubsub/commit/a7a4caaa5a73e9b15369471dc892688e24bf52e0)) + +## [2.25.2](https://github.com/googleapis/python-pubsub/compare/v2.25.1...v2.25.2) (2024-09-30) + + +### Documentation + +* Add command line args for OpenTelemetry Subscribe sample ([#1265](https://github.com/googleapis/python-pubsub/issues/1265)) ([0ff7f2a](https://github.com/googleapis/python-pubsub/commit/0ff7f2a64b5aa1b0e014e0933e4edaef0fb3f222)) + +## [2.25.1](https://github.com/googleapis/python-pubsub/compare/v2.25.0...v2.25.1) (2024-09-29) + + +### Bug Fixes + +* Update the requirements.txt for samples directory ([#1263](https://github.com/googleapis/python-pubsub/issues/1263)) ([5cce8b1](https://github.com/googleapis/python-pubsub/commit/5cce8b103ab7085613b7ee0efb5c8342d41ebae1)) + +## [2.25.0](https://github.com/googleapis/python-pubsub/compare/v2.24.0...v2.25.0) (2024-09-28) + + +### Features + +* Add OpenTelemetry publish sample ([#1258](https://github.com/googleapis/python-pubsub/issues/1258)) ([bc13ff0](https://github.com/googleapis/python-pubsub/commit/bc13ff05c3d1104c17169c360bdc09340430da37)) + +## [2.24.0](https://github.com/googleapis/python-pubsub/compare/v2.23.1...v2.24.0) (2024-09-24) + + +### Features + +* Add OpenTelemetry support for Subscribe Side ([#1252](https://github.com/googleapis/python-pubsub/issues/1252)) ([1b6f3d2](https://github.com/googleapis/python-pubsub/commit/1b6f3d284095e138943576de8551df263f73a506)) +* Open Telemetry Publish Side Support ([#1241](https://github.com/googleapis/python-pubsub/issues/1241)) ([bb5f3d1](https://github.com/googleapis/python-pubsub/commit/bb5f3d1a7df2d661cccc336edc8eceb2161c6921)) + + +### Bug Fixes + +* Fix flaky test ([#1254](https://github.com/googleapis/python-pubsub/issues/1254)) 
([1ae49de](https://github.com/googleapis/python-pubsub/commit/1ae49de09996a5cf19f592f996c46e0222d540fc)) + +## [2.23.1](https://github.com/googleapis/python-pubsub/compare/v2.23.0...v2.23.1) (2024-09-09) + + +### Bug Fixes + +* Replace asserts with None checks for graceful shutdown ([#1244](https://github.com/googleapis/python-pubsub/issues/1244)) ([ced4f52](https://github.com/googleapis/python-pubsub/commit/ced4f527c7f918a87d1b89c2b5da59dbdf00e2c3)) + +## [2.23.0](https://github.com/googleapis/python-pubsub/compare/v2.22.0...v2.23.0) (2024-07-29) + + +### Features + +* Add max messages batching for Cloud Storage subscriptions ([#1224](https://github.com/googleapis/python-pubsub/issues/1224)) ([91c89d3](https://github.com/googleapis/python-pubsub/commit/91c89d36c5099591408ab0661c55929e786b1b04)) + +## [2.22.0](https://github.com/googleapis/python-pubsub/compare/v2.21.5...v2.22.0) (2024-07-06) + + +### Features + +* Add service_account_email for export subscriptions ([ec0cc34](https://github.com/googleapis/python-pubsub/commit/ec0cc349b344b6882979838171b6cae4209a9b02)) +* Add use_topic_schema for Cloud Storage Subscriptions ([ec0cc34](https://github.com/googleapis/python-pubsub/commit/ec0cc349b344b6882979838171b6cae4209a9b02)) + +## [2.21.5](https://github.com/googleapis/python-pubsub/compare/v2.21.4...v2.21.5) (2024-06-20) + + +### Bug Fixes + +* Allow Protobuf 5.x ([a369f04](https://github.com/googleapis/python-pubsub/commit/a369f04c46e4b3db34dcf8cc2ef7cda4ea491e26)) + +## [2.21.4](https://github.com/googleapis/python-pubsub/compare/v2.21.3...v2.21.4) (2024-06-18) + + +### Documentation + +* **samples:** Add code sample for optimistic subscribe ([#1182](https://github.com/googleapis/python-pubsub/issues/1182)) ([d8e8aa5](https://github.com/googleapis/python-pubsub/commit/d8e8aa59ab0288fdaf5a1cc5e476581e73d0f82c)) + +## [2.21.3](https://github.com/googleapis/python-pubsub/compare/v2.21.2...v2.21.3) (2024-06-10) + + +### Bug Fixes + +* Race condition where future callbacks invoked before client is in paused state ([#1145](https://github.com/googleapis/python-pubsub/issues/1145)) ([d12bac6](https://github.com/googleapis/python-pubsub/commit/d12bac6d94b337aa8978006600fb00e5b13d741d)) +* Suppress warnings caused during pytest runs ([#1189](https://github.com/googleapis/python-pubsub/issues/1189)) ([cd51149](https://github.com/googleapis/python-pubsub/commit/cd51149c9e0d3c59d1c75395c05308e860908bf9)) +* Typecheck errors in samples/snippets/subscriber.py ([#1186](https://github.com/googleapis/python-pubsub/issues/1186)) ([3698450](https://github.com/googleapis/python-pubsub/commit/3698450041cb4db0e2957832c24450f674b89c11)) + +## [2.21.2](https://github.com/googleapis/python-pubsub/compare/v2.21.1...v2.21.2) (2024-05-30) + + +### Bug Fixes + +* Test failures due to grpcio changes ([#1178](https://github.com/googleapis/python-pubsub/issues/1178)) ([086dd46](https://github.com/googleapis/python-pubsub/commit/086dd4660ec56d9ff2d41a32ec0b8e8dc44acc55)) + +## [2.21.1](https://github.com/googleapis/python-pubsub/compare/v2.21.0...v2.21.1) (2024-04-04) + + +### Bug Fixes + +* Set timeout to infinite for publishing with ordering keys enabled ([#1134](https://github.com/googleapis/python-pubsub/issues/1134)) ([67daf3c](https://github.com/googleapis/python-pubsub/commit/67daf3c64239d22eabe59c3df214057a4e59a39e)) + +## [2.21.0](https://github.com/googleapis/python-pubsub/compare/v2.20.3...v2.21.0) (2024-03-26) + + +### Features + +* Add custom datetime format for Cloud Storage subscriptions 
([#1131](https://github.com/googleapis/python-pubsub/issues/1131)) ([4da6744](https://github.com/googleapis/python-pubsub/commit/4da67441ddab01a173620d8c03bc640271c785c6)) + +## [2.20.3](https://github.com/googleapis/python-pubsub/compare/v2.20.2...v2.20.3) (2024-03-21) + + +### Documentation + +* **samples:** Update Region Tags ([#1128](https://github.com/googleapis/python-pubsub/issues/1128)) ([e3bc89e](https://github.com/googleapis/python-pubsub/commit/e3bc89eaa51337c93144d6c3100486353d494ad9)) + +## [2.20.2](https://github.com/googleapis/python-pubsub/compare/v2.20.1...v2.20.2) (2024-03-15) + + +### Documentation + +* **samples:** Add Create Topic with Kinesis IngestionDataSourceSettings Sample ([#1120](https://github.com/googleapis/python-pubsub/issues/1120)) ([83dc9ff](https://github.com/googleapis/python-pubsub/commit/83dc9fff13aa35518fb9b6a73472816da852d975)) +* **samples:** Update Topic with Kinesis Ingestion Settings ([#1123](https://github.com/googleapis/python-pubsub/issues/1123)) ([e0e2d83](https://github.com/googleapis/python-pubsub/commit/e0e2d831da8d17288c3ae8900bea2388ce8758af)) + +## [2.20.1](https://github.com/googleapis/python-pubsub/compare/v2.20.0...v2.20.1) (2024-03-06) + + +### Bug Fixes + +* Catch and surface BaseException() ([#1108](https://github.com/googleapis/python-pubsub/issues/1108)) ([07e427f](https://github.com/googleapis/python-pubsub/commit/07e427f675464b9aa79c68dede67082529054980)) + +## [2.20.0](https://github.com/googleapis/python-pubsub/compare/v2.19.8...v2.20.0) (2024-03-05) + + +### Features + +* Add include_recaptcha_script as a new action in firewall policies ([#1109](https://github.com/googleapis/python-pubsub/issues/1109)) ([54041a5](https://github.com/googleapis/python-pubsub/commit/54041a527398eb0ec5daa97a346ba3202ce349f3)) + + +### Documentation + +* **samples:** Correct type and description of `timeout` parameter in subscriber quickstart ([#1051](https://github.com/googleapis/python-pubsub/issues/1051)) ([141a473](https://github.com/googleapis/python-pubsub/commit/141a473561bd0e45d3137a02cbefddb454ab3af4)) + +## [2.19.8](https://github.com/googleapis/python-pubsub/compare/v2.19.7...v2.19.8) (2024-03-05) + + +### Bug Fixes + +* **deps:** Exclude google-auth 2.24.0 and 2.25.0 ([#1102](https://github.com/googleapis/python-pubsub/issues/1102)) ([165c983](https://github.com/googleapis/python-pubsub/commit/165c983803c48a17141765395cf9ec2e6a7056fa)) + +## [2.19.7](https://github.com/googleapis/python-pubsub/compare/v2.19.6...v2.19.7) (2024-02-24) + + +### Bug Fixes + +* **deps:** Require `google-api-core>=1.34.1` ([#1080](https://github.com/googleapis/python-pubsub/issues/1080)) ([1a5a134](https://github.com/googleapis/python-pubsub/commit/1a5a1342de8736c6a2b1ac63476667f8a02b5bb8)) + +## [2.19.6](https://github.com/googleapis/python-pubsub/compare/v2.19.5...v2.19.6) (2024-02-23) + + +### Bug Fixes + +* Remove LOGGER.exception() line ([#1087](https://github.com/googleapis/python-pubsub/issues/1087)) ([a395d26](https://github.com/googleapis/python-pubsub/commit/a395d26ed0fffaee8662f988da97dd35c480af4f)) + +## [2.19.5](https://github.com/googleapis/python-pubsub/compare/v2.19.4...v2.19.5) (2024-02-22) + + +### Bug Fixes + +* Update system_test_python_versions ([#1096](https://github.com/googleapis/python-pubsub/issues/1096)) ([c659ac7](https://github.com/googleapis/python-pubsub/commit/c659ac777f177e54d7272a8de93fa9f554b15d46)) + +## [2.19.4](https://github.com/googleapis/python-pubsub/compare/v2.19.3...v2.19.4) (2024-02-09) + + +### Bug Fixes + 
+* **diregapic:** S/bazel/bazelisk/ in DIREGAPIC build GitHub action ([#1064](https://github.com/googleapis/python-pubsub/issues/1064)) ([d56ad12](https://github.com/googleapis/python-pubsub/commit/d56ad12f197e9e379d2a4a0a38be108808985c23)) + +## [2.19.3](https://github.com/googleapis/python-pubsub/compare/v2.19.2...v2.19.3) (2024-02-08) + + +### Bug Fixes + +* Add google-auth as a direct dependency ([#1076](https://github.com/googleapis/python-pubsub/issues/1076)) ([5ce7301](https://github.com/googleapis/python-pubsub/commit/5ce7301b3056191203bc89bbcf1f33083de72a2d)) + +## [2.19.2](https://github.com/googleapis/python-pubsub/compare/v2.19.1...v2.19.2) (2024-02-08) + + +### Bug Fixes + +* Unit test failures in https://github.com/googleapis/python-pubsu… ([#1074](https://github.com/googleapis/python-pubsub/issues/1074)) ([3c6d128](https://github.com/googleapis/python-pubsub/commit/3c6d128a53d83439036aaec1f1fd48331152935b)) + +## [2.19.1](https://github.com/googleapis/python-pubsub/compare/v2.19.0...v2.19.1) (2024-02-02) + + +### Documentation + +* **samples:** Swap writer and reader schema to correct places ([265f410](https://github.com/googleapis/python-pubsub/commit/265f4106f499ec5d2d01a127ba192404c1836a28)) + +## [2.19.0](https://github.com/googleapis/python-pubsub/compare/v2.18.4...v2.19.0) (2023-12-10) + + +### Features + +* Add `use_table_schema` field to BigQueryConfig ([#1035](https://github.com/googleapis/python-pubsub/issues/1035)) ([ac6d912](https://github.com/googleapis/python-pubsub/commit/ac6d9126413b5c8e2b00727f7d74f03b7fb9d9ed)) +* Add support for Python 3.12 ([#1025](https://github.com/googleapis/python-pubsub/issues/1025)) ([660b8ea](https://github.com/googleapis/python-pubsub/commit/660b8eaf0daf975834a8333aedf8415867a4874d)) +* Introduce compatibility with native namespace packages ([#1024](https://github.com/googleapis/python-pubsub/issues/1024)) ([0432420](https://github.com/googleapis/python-pubsub/commit/0432420dcf18304dc1912075482eff0d2dc73009)) + + +### Bug Fixes + +* Use `retry_async` instead of `retry` in async client ([#1030](https://github.com/googleapis/python-pubsub/issues/1030)) ([05dd571](https://github.com/googleapis/python-pubsub/commit/05dd571760b71ae2930072f0677616dfc19d9511)) + +## [2.18.4](https://github.com/googleapis/python-pubsub/compare/v2.18.3...v2.18.4) (2023-09-09) + + +### Documentation + +* Minor formatting ([#988](https://github.com/googleapis/python-pubsub/issues/988)) ([4eea8c5](https://github.com/googleapis/python-pubsub/commit/4eea8c5c757da6800ba6958e4b8e66085b0e9ddb)) + +## [2.18.3](https://github.com/googleapis/python-pubsub/compare/v2.18.2...v2.18.3) (2023-08-18) + + +### Bug Fixes + +* Make retry policy back off more aggressively for RPCs that retry RESOURCE_EXHAUSTED ([#979](https://github.com/googleapis/python-pubsub/issues/979)) ([4073b3d](https://github.com/googleapis/python-pubsub/commit/4073b3dd6a6989e86d5e19bdb9b9c47ae2b0db87)) + +## [2.18.2](https://github.com/googleapis/python-pubsub/compare/v2.18.1...v2.18.2) (2023-08-07) + + +### Bug Fixes + +* Change retry multiplier from 1.3 to 4, for requests that retry Resour… ([#971](https://github.com/googleapis/python-pubsub/issues/971)) ([e4364d2](https://github.com/googleapis/python-pubsub/commit/e4364d2a061bb73fe3410d2ef213a04f3315e282)) + +## [2.18.1](https://github.com/googleapis/python-pubsub/compare/v2.18.0...v2.18.1) (2023-07-26) + + +### Documentation + +* Clarified where ordering_key will be written if write_metadata is set 
([#965](https://github.com/googleapis/python-pubsub/issues/965)) ([3d95034](https://github.com/googleapis/python-pubsub/commit/3d95034f94426cdcf5b87323b9e463a7e8ce4f91)) + +## [2.18.0](https://github.com/googleapis/python-pubsub/compare/v2.17.1...v2.18.0) (2023-07-12) + + +### Features + +* Add push config wrapper fields ([#925](https://github.com/googleapis/python-pubsub/issues/925)) ([8e803cf](https://github.com/googleapis/python-pubsub/commit/8e803cf4ab136d606a0be459ab6d281b65560599)) + + +### Bug Fixes + +* Add async context manager return types ([#944](https://github.com/googleapis/python-pubsub/issues/944)) ([a3b2061](https://github.com/googleapis/python-pubsub/commit/a3b2061c4edf42123335fcfee6fcc4a44e90a5eb)) + + +### Documentation + +* Tightened requirements on cloud storage subscription filename suffixes ([#938](https://github.com/googleapis/python-pubsub/issues/938)) ([f54dcd0](https://github.com/googleapis/python-pubsub/commit/f54dcd0e7324218d87c37c0266c441a62012866d)) +* Update Community section in README.rst ([#945](https://github.com/googleapis/python-pubsub/issues/945)) ([dea258c](https://github.com/googleapis/python-pubsub/commit/dea258cff3ad19ffba67659bb03a2edcc44889d9)) + +## [2.17.1](https://github.com/googleapis/python-pubsub/compare/v2.17.0...v2.17.1) (2023-05-23) + + +### Documentation + +* Add attributes to pubsub_v1.types ([#921](https://github.com/googleapis/python-pubsub/issues/921)) ([4607dca](https://github.com/googleapis/python-pubsub/commit/4607dca983a8f5d4043c5661165da99453f2ef4a)) + +## [2.17.0](https://github.com/googleapis/python-pubsub/compare/v2.16.1...v2.17.0) (2023-05-12) + + +### Features + +* Add cloud storage subscription fields ([#918](https://github.com/googleapis/python-pubsub/issues/918)) ([6e262da](https://github.com/googleapis/python-pubsub/commit/6e262da9810f58f3f34b352e4771e084381ed0aa)) + +## [2.16.1](https://github.com/googleapis/python-pubsub/compare/v2.16.0...v2.16.1) (2023-05-05) + + +### Bug Fixes + +* Allow dropping cleaned-up keys ([#911](https://github.com/googleapis/python-pubsub/issues/911)) ([4b3157c](https://github.com/googleapis/python-pubsub/commit/4b3157ccb83771a2e613fc3475035f24d358ccf6)) + + +### Documentation + +* Add comment to setup.py ([#905](https://github.com/googleapis/python-pubsub/issues/905)) ([9825109](https://github.com/googleapis/python-pubsub/commit/9825109a826e63cd076c21367157be7a3c01c45b)) + +## [2.16.0](https://github.com/googleapis/python-pubsub/compare/v2.15.2...v2.16.0) (2023-04-06) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#863](https://github.com/googleapis/python-pubsub/issues/863)) ([a80c1d1](https://github.com/googleapis/python-pubsub/commit/a80c1d1f6f880cd13c247231bdc86c824edab8cb)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#894](https://github.com/googleapis/python-pubsub/issues/894)) ([ee2ea73](https://github.com/googleapis/python-pubsub/commit/ee2ea7341268fd5428d98208b8af2fc96efe8d03)) + +## [2.15.2](https://github.com/googleapis/python-pubsub/compare/v2.15.1...v2.15.2) (2023-03-20) + + +### Documentation + +* Update missing docstrings ([#890](https://github.com/googleapis/python-pubsub/issues/890)) ([5849e04](https://github.com/googleapis/python-pubsub/commit/5849e048f48074e3a8ecddbe3bfbcfc9da094a28)) + +## [2.15.1](https://github.com/googleapis/python-pubsub/compare/v2.15.0...v2.15.1) (2023-03-14) + + +### Bug Fixes + +* Set x-goog-request-params for streaming pull request 
([#884](https://github.com/googleapis/python-pubsub/issues/884)) ([0d247e6](https://github.com/googleapis/python-pubsub/commit/0d247e6b189409b4d57c95dbbbf3df3e0fac0fa2)) + +## [2.15.0](https://github.com/googleapis/python-pubsub/compare/v2.14.1...v2.15.0) (2023-02-22) + + +### Features + +* Add google.api.method.signature to update methods ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Add temporary_failed_ack_ids to ModifyAckDeadlineConfirmation ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) + + +### Bug Fixes + +* Add service_yaml_parameters to py_gapic_library BUILD.bazel targets ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Move global import in publisher sample ([#866](https://github.com/googleapis/python-pubsub/issues/866)) ([271a46d](https://github.com/googleapis/python-pubsub/commit/271a46d4da0c668674a36c0f58bbe0fe70985b75)) +* Port proto changes ([#871](https://github.com/googleapis/python-pubsub/issues/871)) ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) + + +### Documentation + +* Clarify BigQueryConfig PERMISSION_DENIED state ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Clarify subscription description ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Fix Pull description ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Fix PullResponse description ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Replacing HTML code with Markdown ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Update Pub/Sub topic retention limit from 7 days to 31 days ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) + +## [2.14.1](https://github.com/googleapis/python-pubsub/compare/v2.14.0...v2.14.1) (2023-02-08) + + +### Bug Fixes + +* Add context manager return types ([4f690b9](https://github.com/googleapis/python-pubsub/commit/4f690b9287beefbca6505cf88637f4a8c5077152)) + + +### Documentation + +* Add documentation for enums ([4f690b9](https://github.com/googleapis/python-pubsub/commit/4f690b9287beefbca6505cf88637f4a8c5077152)) +* Mark revision_id in CommitSchemaRevisionRequest as deprecated ([#861](https://github.com/googleapis/python-pubsub/issues/861)) ([09b846d](https://github.com/googleapis/python-pubsub/commit/09b846ddd066519c0570522b8525ec5705714b0a)) + +## [2.14.0](https://github.com/googleapis/python-pubsub/compare/v2.13.12...v2.14.0) (2023-01-18) + + +### Features + +* Add schema evolution methods and fields ([9479356](https://github.com/googleapis/python-pubsub/commit/9479356029f28c565a06ab759330c6e430a47c51)) +* Add support for python 3.11 ([9479356](https://github.com/googleapis/python-pubsub/commit/9479356029f28c565a06ab759330c6e430a47c51)) + +## [2.13.12](https://github.com/googleapis/python-pubsub/compare/v2.13.11...v2.13.12) (2023-01-06) + + +### Bug Fixes + +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) +* Drop usage of pkg_resources 
([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) +* Fix timeout default values ([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) + + +### Documentation + +* **samples:** Snippetgen should call await on the operation coroutine before calling result ([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) + +## [2.13.11](https://github.com/googleapis/python-pubsub/compare/v2.13.10...v2.13.11) (2022-11-11) + + +### Bug Fixes + +* Remove suboptimal logic in leasing behavior ([#816](https://github.com/googleapis/python-pubsub/issues/816)) ([f067af3](https://github.com/googleapis/python-pubsub/commit/f067af348b8d3deb72981c58d942e887c0efb5ff)) + +## [2.13.10](https://github.com/googleapis/python-pubsub/compare/v2.13.8...v2.13.10) (2022-10-14) + + +### Bug Fixes + +* Batch at most 1,000 ack ids per request ([#802](https://github.com/googleapis/python-pubsub/issues/802)) ([4361e67](https://github.com/googleapis/python-pubsub/commit/4361e6735004a5600ee73979b99e6b9dd587c49b)) +* **deps:** Allow protobuf 3.19.5 ([#801](https://github.com/googleapis/python-pubsub/issues/801)) ([fa23503](https://github.com/googleapis/python-pubsub/commit/fa235033481783c2ec378b2a26b223bdff206461)) +* Silence invalid_ack_id warnings for receipt modacks ([#798](https://github.com/googleapis/python-pubsub/issues/798)) ([17feea5](https://github.com/googleapis/python-pubsub/commit/17feea5783f3a878b4dcfb3a8570585f7637378f)) + + +### Miscellaneous Chores + +* release as 2.13.10 ([34f022b](https://github.com/googleapis/python-pubsub/commit/34f022b4ee62d53a193bc2babafad508e2f2540b)) + +## [2.13.8](https://github.com/googleapis/python-pubsub/compare/v2.13.7...v2.13.8) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#792](https://github.com/googleapis/python-pubsub/issues/792)) ([1a54f7c](https://github.com/googleapis/python-pubsub/commit/1a54f7cd3d997270e0a5d70f7caea32d8753be76)) + +## [2.13.7](https://github.com/googleapis/python-pubsub/compare/v2.13.6...v2.13.7) (2022-09-22) + + +### Bug Fixes + +* Remove expired ack_ids ([#787](https://github.com/googleapis/python-pubsub/issues/787)) ([b4b809d](https://github.com/googleapis/python-pubsub/commit/b4b809d616cf93881815d6baadf2dd322ab566d1)) + +## [2.13.6](https://github.com/googleapis/python-pubsub/compare/v2.13.5...v2.13.6) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#762](https://github.com/googleapis/python-pubsub/issues/762)) ([260bd18](https://github.com/googleapis/python-pubsub/commit/260bd183ffe19992be9a1c1d298438c1f44d3fa9)) +* **deps:** require proto-plus >= 1.22.0 ([260bd18](https://github.com/googleapis/python-pubsub/commit/260bd183ffe19992be9a1c1d298438c1f44d3fa9)) +* set stream_ack_deadline to max_duration_per_lease_extension or 60 s, set ack_deadline to min_duration_per_lease_extension or 10 s ([#760](https://github.com/googleapis/python-pubsub/issues/760)) ([4444129](https://github.com/googleapis/python-pubsub/commit/4444129b28a19296752e865b73827b78e99adea5)) +* Update stream_ack_deadline with ack_deadline ([#763](https://github.com/googleapis/python-pubsub/issues/763)) ([e600ad8](https://github.com/googleapis/python-pubsub/commit/e600ad8228930445765ffa0c45500a7779e25817)) + +## [2.13.5](https://github.com/googleapis/python-pubsub/compare/v2.13.4...v2.13.5) (2022-08-10) + + +### Documentation + +* reorganize sphinx structure 
([#751](https://github.com/googleapis/python-pubsub/issues/751)) ([b6de574](https://github.com/googleapis/python-pubsub/commit/b6de57458a1976a068dd229208b9b678a9d3f866)) + +## [2.13.4](https://github.com/googleapis/python-pubsub/compare/v2.13.3...v2.13.4) (2022-07-15) + + +### Bug Fixes + +* Remove bidi modacks on StreamingPull initial request ([#738](https://github.com/googleapis/python-pubsub/issues/738)) ([1e7d469](https://github.com/googleapis/python-pubsub/commit/1e7d46901c4472a3534980621e88d81aa2e50760)) + +## [2.13.3](https://github.com/googleapis/python-pubsub/compare/v2.13.2...v2.13.3) (2022-07-13) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#735](https://github.com/googleapis/python-pubsub/issues/735)) ([a5624fb](https://github.com/googleapis/python-pubsub/commit/a5624fbee2951c7f0c3e413d7d399a41fa0aa4bf)) + +## [2.13.2](https://github.com/googleapis/python-pubsub/compare/v2.13.1...v2.13.2) (2022-07-08) + + +### Bug Fixes + +* **deps:** require google-api-core >= 2.8.0 ([#726](https://github.com/googleapis/python-pubsub/issues/726)) ([c80ad41](https://github.com/googleapis/python-pubsub/commit/c80ad41abf36c709f8299a6fa22f3672705b1b6d)) + +## [2.13.1](https://github.com/googleapis/python-pubsub/compare/v2.13.0...v2.13.1) (2022-07-07) + + +### Bug Fixes + +* change info logs to debug ([#693](https://github.com/googleapis/python-pubsub/issues/693)) ([950fbce](https://github.com/googleapis/python-pubsub/commit/950fbce009fd56a55feea971f8e6083fa84d54fc)) +* require python 3.7+ ([#730](https://github.com/googleapis/python-pubsub/issues/730)) ([0d949b8](https://github.com/googleapis/python-pubsub/commit/0d949b8da096d1b0a5e26f607b1cd79fb560252a)) + +## [2.13.0](https://github.com/googleapis/python-pubsub/compare/v2.12.1...v2.13.0) (2022-06-06) + + +### Features + +* add BigQuery configuration for subscriptions ([#685](https://github.com/googleapis/python-pubsub/issues/685)) ([6fa03be](https://github.com/googleapis/python-pubsub/commit/6fa03be779d6a7105bb7c029b95d4c357d2a49df)) + + +### Bug Fixes + +* add info log for bidi streaming pull ack_deadline requests ([#692](https://github.com/googleapis/python-pubsub/issues/692)) ([fcb67dd](https://github.com/googleapis/python-pubsub/commit/fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9)) +* **deps:** require protobuf <4.0.0dev ([#699](https://github.com/googleapis/python-pubsub/issues/699)) ([dcdf013](https://github.com/googleapis/python-pubsub/commit/dcdf0137905949662ce191adcb6dd588bd74f9fe)) + + +### Documentation + +* fix changelog header to consistent size ([#700](https://github.com/googleapis/python-pubsub/issues/700)) ([93f2b62](https://github.com/googleapis/python-pubsub/commit/93f2b62a18f622d8da71043a6b6d3f53295db308)) + +## [2.12.1](https://github.com/googleapis/python-pubsub/compare/v2.12.0...v2.12.1) (2022-05-11) + + +### Bug Fixes + +* Add emulator support to schema service ([#658](https://github.com/googleapis/python-pubsub/issues/658)) ([1a07d7c](https://github.com/googleapis/python-pubsub/commit/1a07d7ce3b3580191f74b7895dd1b8afb13baccb)) +* Handle duplicate acks with streaming pull ([#662](https://github.com/googleapis/python-pubsub/issues/662)) ([219491e](https://github.com/googleapis/python-pubsub/commit/219491ea1e615f33e1955e3afc204a0281c525db)) +* set min snooze on lease management to .01 sec ([#678](https://github.com/googleapis/python-pubsub/issues/678)) ([91c6e69](https://github.com/googleapis/python-pubsub/commit/91c6e69e96953919bc86004692edd3a52c7b9796)) + + +### Documentation + +* fix 
project_path typo in UPGRADING.md ([#660](https://github.com/googleapis/python-pubsub/issues/660)) ([20d661c](https://github.com/googleapis/python-pubsub/commit/20d661c8562cc1f777ac7b3f1ba03dcad7a831c0)) +* mark eod as preview ([#657](https://github.com/googleapis/python-pubsub/issues/657)) ([418e1a3](https://github.com/googleapis/python-pubsub/commit/418e1a3783441469713ca8ec8776007ff0fdb15d)) + +## [2.12.0](https://github.com/googleapis/python-pubsub/compare/v2.11.0...v2.12.0) (2022-04-06) + + +### Features + +* increase GRPC max metadata size to 4 MB ([#623](https://github.com/googleapis/python-pubsub/issues/623)) ([54b9e07](https://github.com/googleapis/python-pubsub/commit/54b9e07401b7309f16ecfe2a7afc36ea69f24a9c)) + + +### Bug Fixes + +* mypy errors ([#622](https://github.com/googleapis/python-pubsub/issues/622)) ([dab13d5](https://github.com/googleapis/python-pubsub/commit/dab13d5fb1d723c971cd84ae20f18462e624a26d)) +* process ErrorInfo / GRPC errors for ack/modack only when exactly-once delivery is enabled ([#626](https://github.com/googleapis/python-pubsub/issues/626)) ([cc1953b](https://github.com/googleapis/python-pubsub/commit/cc1953bcf942fb394a92ba50ba615adf822bfe7d)) + +## [2.11.0](https://github.com/googleapis/python-pubsub/compare/v2.10.0...v2.11.0) (2022-03-09) + + +### Features + +* retry temporary GRPC statuses for ack/modack/nack when exactly-once delivery is enabled ([#607](https://github.com/googleapis/python-pubsub/issues/607)) ([a91bed8](https://github.com/googleapis/python-pubsub/commit/a91bed829c9040fcc6c1e70b99b66188ac4ded40)) +* return singleton success future for exactly-once methods in Message ([#608](https://github.com/googleapis/python-pubsub/issues/608)) ([253ced2](https://github.com/googleapis/python-pubsub/commit/253ced28f308450c7a1a93cc38f6d101ecd7d4c0)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#600](https://github.com/googleapis/python-pubsub/issues/600)) ([1608b7f](https://github.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e)) +* **deps:** require proto-plus>=1.15.0 ([1608b7f](https://github.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e)) + +## [2.10.0](https://github.com/googleapis/python-pubsub/compare/v2.9.0...v2.10.0) (2022-03-04) + + +### Features + +* add api key support ([#571](https://github.com/googleapis/python-pubsub/issues/571)) ([cdda762](https://github.com/googleapis/python-pubsub/commit/cdda762f6d15d96f5e2d7fac975f3494dc49eaa9)) +* add exactly once delivery flag ([#577](https://github.com/googleapis/python-pubsub/issues/577)) ([d6614e2](https://github.com/googleapis/python-pubsub/commit/d6614e274328c58449e67dfc788e2e7986c0c10b)) +* add support for exactly once delivery ([#578](https://github.com/googleapis/python-pubsub/issues/578)) ([95a86fa](https://github.com/googleapis/python-pubsub/commit/95a86fa5f528701b760064f0cece0efa4e60cd44)) +* exactly-once delivery support ([#550](https://github.com/googleapis/python-pubsub/issues/550)) ([2fb6e15](https://github.com/googleapis/python-pubsub/commit/2fb6e1533192ae81dceee5c71283169a0a85a015)) + + +### Bug Fixes + +* **deps:** move libcst to extras ([#585](https://github.com/googleapis/python-pubsub/issues/585)) ([0846762](https://github.com/googleapis/python-pubsub/commit/084676243ca4afd54cda601e589b80883f9703a3)) +* refactor client classes for safer type checking ([#552](https://github.com/googleapis/python-pubsub/issues/552)) 
([7f705be](https://github.com/googleapis/python-pubsub/commit/7f705beb927383f14b9d56f0341ee0de101f7c05)) +* resolve DuplicateCredentialArgs error when using credentials_file ([8ca8cf2](https://github.com/googleapis/python-pubsub/commit/8ca8cf27333baf823a1dffd081e63079f1a12625)) + + +### Samples + +* samples: create subscription with filtering enabled [#580](https://github.com/googleapis/python-pubsub/pull/580) +* samples: handle empty response in sync pull samples [#586](https://github.com/googleapis/python-pubsub/pull/586) +* samples: sample for receiving messages with exactly-once delivery enabled [#588](https://github.com/googleapis/python-pubsub/pull/588) +* samples: create subscription with exactly once delivery [#592](https://github.com/googleapis/python-pubsub/pull/592) + + +### Documentation + +* add autogenerated code snippets ([aa3754c](https://github.com/googleapis/python-pubsub/commit/aa3754cf432bd02be2734a23a32d5b36cd216aee)) +* Docs have inconsistent default values for max_latency and max_bytes ([#572](https://github.com/googleapis/python-pubsub/issues/572)) ([d136dfd](https://github.com/googleapis/python-pubsub/commit/d136dfdb69ebeebd1411a1415f863b94d07078f0)) + +## [2.9.0](https://www.github.com/googleapis/python-pubsub/compare/v2.8.0...v2.9.0) (2021-11-10) + + +### Features + +* add context manager support in client ([#516](https://www.github.com/googleapis/python-pubsub/issues/516)) ([51eae67](https://www.github.com/googleapis/python-pubsub/commit/51eae67c47e2ce7d2f7620209e98df4a129801b5)) +* add support for Python 3.10 ([#518](https://www.github.com/googleapis/python-pubsub/issues/518)) ([bb25d75](https://www.github.com/googleapis/python-pubsub/commit/bb25d755d70ba19e69d8a281be65f13eb994967d)) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([b72522a](https://www.github.com/googleapis/python-pubsub/commit/b72522a4617c4b2773fb6a5a631038791aa08300)) +* **deps:** drop packaging dependency ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) +* **deps:** require google-api-core >= 1.28.0 ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) +* improper types in pagers generation ([2ad639d](https://www.github.com/googleapis/python-pubsub/commit/2ad639d6370c7a085498595d7bd0d7eaadfff3c1)) + + +### Documentation + +* add type annotations to codebase ([#509](https://www.github.com/googleapis/python-pubsub/issues/509)) ([093cabf](https://www.github.com/googleapis/python-pubsub/commit/093cabff9f0464b1dfaa8f373b6fffbc439518de)) +* list oneofs in docstring ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) + +## [2.8.0](https://www.github.com/googleapis/python-pubsub/compare/v2.7.1...v2.8.0) (2021-09-02) + + +### Features + +* closed subscriber as context manager raises ([#488](https://www.github.com/googleapis/python-pubsub/issues/488)) ([a05a3f2](https://www.github.com/googleapis/python-pubsub/commit/a05a3f250cf8567ffe0d2eb3ecc45856a2bcd07c)) + + +### Documentation + +* clarify the types of Message parameters ([#486](https://www.github.com/googleapis/python-pubsub/issues/486)) ([633e91b](https://www.github.com/googleapis/python-pubsub/commit/633e91bbfc0a8f4f484089acff6812b754f40c75)) + +## [2.7.1](https://www.github.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1) (2021-08-13) + + +### Bug Fixes + +* remove dependency on pytz 
([#472](https://www.github.com/googleapis/python-pubsub/issues/472)) ([972cc16](https://www.github.com/googleapis/python-pubsub/commit/972cc163f5a1477b37a5ab7e329faf1468637fa2)) + +## [2.7.0](https://www.github.com/googleapis/python-pubsub/compare/v2.6.1...v2.7.0) (2021-07-24) + + +### Features + +* Add `always_use_jwt_access`. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Add method signature for `Subscriber.Pull` without the deprecated `return_immediately` field. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Add Pub/Sub topic retention fields. ([#456](https://www.github.com/googleapis/python-pubsub/issues/456)) ([911829d](https://www.github.com/googleapis/python-pubsub/commit/911829d85c6ec36a87b873cbfe34497b1a493dde)) +* Add subscription properties to streaming pull response. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Support self-signed JWT flow for service accounts. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) + + +### Bug Fixes + +* Add async client to `%name_%version/init.py`. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Disable `always_use_jwt_access`. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Enable self signed JWT for gRPC. ([#458](https://www.github.com/googleapis/python-pubsub/issues/458)) ([c6e0ff6](https://www.github.com/googleapis/python-pubsub/commit/c6e0ff69faeda614aa6088af59d3420e16720d27)) + +### Dependencies + +* Add `packaging` requirement. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Require `google-api-core >= 1.26.0`. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) + +## 2.6.1 + +07-05-2021 10:33 PDT + +### Dependencies + +- Fix possible crash by requiring `grpcio >= 1.38.1`. ([#414](https://github.com/googleapis/python-pubsub/issues/414)) ([7037a28](https://github.com/googleapis/python-pubsub/pull/435/commits/7037a28090aa4efa01808231721716bca80bb0b7)) + +### Documentation + +- Adjust samples for publishing with error handler and flow control. ([#433](https://github.com/googleapis/python-pubsub/pull/433)) + +### Internal / Testing Changes + +- Fix flaky sync pull sample test. ([#434](https://github.com/googleapis/python-pubsub/pull/434)) +- Mitigate flaky snippets tests. 
([#432](https://github.com/googleapis/python-pubsub/pull/432)) + +## [2.6.0](https://www.github.com/googleapis/python-pubsub/compare/v2.5.0...v2.6.0) (2021-06-17) + + +### Features + +* support customizable retry and timeout settings on the publisher client ([#299](https://www.github.com/googleapis/python-pubsub/issues/299)) ([7597604](https://www.github.com/googleapis/python-pubsub/commit/7597604b41fa3a1e9bf34addc35c8647dde007cc)) + + +### Bug Fixes + +* ACK deadline set for received messages can be too low ([#416](https://www.github.com/googleapis/python-pubsub/issues/416)) ([e907f6e](https://www.github.com/googleapis/python-pubsub/commit/e907f6e05f59f64a3b08df3304e92ec960997be6)) +* threads can skip the line in publisher flow controller ([#422](https://www.github.com/googleapis/python-pubsub/issues/422)) ([ef89f55](https://www.github.com/googleapis/python-pubsub/commit/ef89f55a41044e9ad26b91132b4b1be9c7b2c127)) + + +### Documentation + +* block until the streaming pull shuts down ([#424](https://www.github.com/googleapis/python-pubsub/issues/424)) ([d0d0b70](https://www.github.com/googleapis/python-pubsub/commit/d0d0b704642df8dee893d3f585aeb666e19696fb)) +* explain that future.cancel() is non-blocking ([#420](https://www.github.com/googleapis/python-pubsub/issues/420)) ([c825789](https://www.github.com/googleapis/python-pubsub/commit/c825789bdff310f44cbb132a723e99d1e6331d8f)) + +## [2.5.0](https://www.github.com/googleapis/python-pubsub/compare/v2.4.2...v2.5.0) (2021-05-18) + + +### Features + +* Make publish futures compatible with `concurrent.futures.as_completed()`. ([#397](https://www.github.com/googleapis/python-pubsub/issues/397)) ([e29a2c0](https://www.github.com/googleapis/python-pubsub/commit/e29a2c0ac6c5d2ebf2311646e552a02f184cfedc)) + + +### Bug Fixes + +* Scheduler errors when executor in shutdown. ([#399](https://www.github.com/googleapis/python-pubsub/issues/399)) ([39a83d3](https://www.github.com/googleapis/python-pubsub/commit/39a83d3eef196e88478ad8362201a2ab12e9f681)) + +## 2.4.2 + +05-06-2021 23:50 PDT + + +### Implementation Changes + +- Fix memory leak when publishing messages. ([#406](https://github.com/googleapis/python-pubsub/pull/406)) +- Do not crash if distribution cannot be found when extracting semantic version. ([#393](https://github.com/googleapis/python-pubsub/pull/393)) +- Emit a warning if `return_immediately` is set with synchronous pull. ([#355](https://github.com/googleapis/python-pubsub/pull/355)) +- Regenerate GAPIC layer with latest changes, use explicit default timeouts. ([#345](https://github.com/googleapis/python-pubsub/pull/345)) + + +### Documentation + +- Add additional info on `use_legacy_flow_control` parameter. ([#301](https://github.com/googleapis/python-pubsub/pull/301)) +- Remove EXPERIMENTAL tag for ordering keys in `publisher/client.py`. ([#324](https://github.com/googleapis/python-pubsub/pull/324)) +- Fix `create_topic()` call in README. ([#360](https://github.com/googleapis/python-pubsub/pull/360)) +- Generate PyPI token in secrets manager, fix spacing in docs (via synth). ([#384](https://github.com/googleapis/python-pubsub/pull/384)) +- Add `SECURITY.md`. ([#401](https://github.com/googleapis/python-pubsub/pull/401)) + + +### Internal / Testing Changes + +- Require 100% unit test coverage (via synth). ([#359](https://github.com/googleapis/python-pubsub/pull/359)) +- Bump test coverage to 100%. ([#364](https://github.com/googleapis/python-pubsub/pull/364)) +- Fix streaming pull close unit test flakiness. 
([#361](https://github.com/googleapis/python-pubsub/pull/361)) +- Pass explicit credentials in all unit tests creating clients. ([#369](https://github.com/googleapis/python-pubsub/pull/369)) +- Fix flaky test for blocking pull shutdown. ([#378](https://github.com/googleapis/python-pubsub/pull/378)) +- Add missing licence header. ([#377](https://github.com/googleapis/python-pubsub/pull/377)) + +## [2.4.1](https://www.github.com/googleapis/python-pubsub/compare/v2.4.0...v2.4.1) (2021-03-30) + +### Bug Fixes + +* Move `await_msg_callbacks` flag to `subscribe()` method, fixing a regression in Pub/Sub Lite client. + ([#320](https://www.github.com/googleapis/python-pubsub/issues/320)) ([d40d027](https://www.github.com/googleapis/python-pubsub/commit/d40d02713c8c189937ae5c21d099b88a3131a59f)) +* SSL error when using the client with the emulator. ([#297](https://www.github.com/googleapis/python-pubsub/issues/297)) ([83db672](https://www.github.com/googleapis/python-pubsub/commit/83db67239d3521457138699109f766d574a0a2c4)) + +### Implementation Changes + +* (samples) Bump the max_time to 10 minutes for a flaky test. ([#311](https://www.github.com/googleapis/python-pubsub/issues/311)) ([e2678d4](https://www.github.com/googleapis/python-pubsub/commit/e2678d47c08e6b03782d2d744a4e630b933fdd51)), closes [#291](https://www.github.com/googleapis/python-pubsub/issues/291) +* (samples) Mark delivery attempts test as flaky. ([#326](https://www.github.com/googleapis/python-pubsub/issues/326)) ([5a97ef1](https://www.github.com/googleapis/python-pubsub/commit/5a97ef1bb7512fe814a8f72a43b3e9698434cd8d)) +* (samples) Mitigate flakiness in subscriber_tests. ([#304](https://www.github.com/googleapis/python-pubsub/issues/304)) ([271a385](https://www.github.com/googleapis/python-pubsub/commit/271a3856d835967f18f6becdae5ad53d585d0ccf)) +* (samples) Retry `InternalServerError` in dead letter policy test. ([#329](https://www.github.com/googleapis/python-pubsub/issues/329)) ([34c9b11](https://www.github.com/googleapis/python-pubsub/commit/34c9b11ae697c280f32642c3101b7f7da971f589)), closes [#321](https://www.github.com/googleapis/python-pubsub/issues/321) + +### Documentation + +* Remove EXPERIMENTAL tag for ordering keys in `types.py`. ([#323](https://www.github.com/googleapis/python-pubsub/issues/323)) ([659cd7a](https://www.github.com/googleapis/python-pubsub/commit/659cd7ae2784245d4217fbc722dac04bd3222d32)) +* Remove EXPERIMENTAL tag from `Schema` service (via synth). ([#307](https://www.github.com/googleapis/python-pubsub/issues/307)) ([ad85202](https://www.github.com/googleapis/python-pubsub/commit/ad852028836520db779c5cc33689ffd7e5458a7d)) + + +## 2.4.0 + +02-22-2021 05:02 PST + + +### New Features + +- Add graceful streaming pull shutdown. ([#292](https://github.com/googleapis/python-pubsub/pull/292)) + +### Documentation + +- Update samples to use the subscriber client as a context manager. 
([#254](https://github.com/googleapis/python-pubsub/pull/254)) + +## [2.3.0](https://www.github.com/googleapis/python-pubsub/compare/v2.2.0...v2.3.0) (2021-02-08) + + +### Features + +* surface SchemaServiceClient in google.cloud.pubsub ([#281](https://www.github.com/googleapis/python-pubsub/issues/281)) ([8751bcc](https://www.github.com/googleapis/python-pubsub/commit/8751bcc5eb782df55769b48253629a3bde3d4661)) + + +### Bug Fixes + +* client version missing from the user agent header ([#275](https://www.github.com/googleapis/python-pubsub/issues/275)) ([b112f4f](https://www.github.com/googleapis/python-pubsub/commit/b112f4fcbf6f2bce8dcf37871bdc540b11f54fe3)) +* Don't open the google.cloud package by adding pubsub.py ([#269](https://www.github.com/googleapis/python-pubsub/issues/269)) ([542d79d](https://www.github.com/googleapis/python-pubsub/commit/542d79d7c5fb7403016150ba477485756cd4097b)) +* flaky samples tests ([#263](https://www.github.com/googleapis/python-pubsub/issues/263)) ([3d6a29d](https://www.github.com/googleapis/python-pubsub/commit/3d6a29de07cc09be663c90a3333f4cd33633994f)) +* Modify synth.py to update grpc transport options ([#266](https://www.github.com/googleapis/python-pubsub/issues/266)) ([41dcd30](https://www.github.com/googleapis/python-pubsub/commit/41dcd30636168f3dd1248f1d99170d531fc9bcb8)) +* pass anonymous credentials for emulator ([#250](https://www.github.com/googleapis/python-pubsub/issues/250)) ([8eed8e1](https://www.github.com/googleapis/python-pubsub/commit/8eed8e16019510dc8b20fb6b009d61a7ac532d26)) +* remove grpc send/receive limits ([#259](https://www.github.com/googleapis/python-pubsub/issues/259)) ([fd2840c](https://www.github.com/googleapis/python-pubsub/commit/fd2840c10f92b03da7f4b40ac69c602220757c0a)) + +## [2.2.0](https://www.github.com/googleapis/python-pubsub/compare/v2.1.0...v2.2.0) (2020-11-16) + + +### Features + +* Add dead lettering max delivery attempts argument ([#236](https://www.github.com/googleapis/python-pubsub/issues/236)) ([7687ae5](https://www.github.com/googleapis/python-pubsub/commit/7687ae500bdb9c76e3ffb23302b4f32dc9627d81)) +* Enable server side flow control by default with the option to turn it off ([#231](https://www.github.com/googleapis/python-pubsub/issues/231)) ([94d738c](https://www.github.com/googleapis/python-pubsub/commit/94d738c07c6404a152c6729f5ba4b106b1fe9355)) + + +### Bug Fixes + +* fix mtls issue in handwritten layer ([#226](https://www.github.com/googleapis/python-pubsub/issues/226)) ([09a409c](https://www.github.com/googleapis/python-pubsub/commit/09a409c6240a74dcb46d8f3f86d4fb95a52274a7)) +* make fixup script consistent with migration docs ([#208](https://www.github.com/googleapis/python-pubsub/issues/208)) ([b64e218](https://www.github.com/googleapis/python-pubsub/commit/b64e2187ab0810437575580d6ddb5315ff60e274)) + + +### Documentation + +* document potentially unexpected blocking behavior of publish() method ([#214](https://www.github.com/googleapis/python-pubsub/issues/214)) ([b6d9bd7](https://www.github.com/googleapis/python-pubsub/commit/b6d9bd7c38d4fe597c25b7b5869fd4a1259c7687)) +* fix get topic_path in subscriber sample ([#210](https://www.github.com/googleapis/python-pubsub/issues/210)) ([7228f6c](https://www.github.com/googleapis/python-pubsub/commit/7228f6c9a4c050bf22bb4bc3582b89b04eaa8702)) + +## 2.1.0 + +09-21-2020 02:19 PDT + + +### Implementation Changes + +- Convert all RPC error types to exceptions. 
([#163](https://github.com/googleapis/python-pubsub/issues/163)) ([#170](https://github.com/googleapis/python-pubsub/pull/170)) +- Pass client options to publisher and subscriber clients. ([#166](https://github.com/googleapis/python-pubsub/issues/166)) ([#190](https://github.com/googleapis/python-pubsub/pull/190)) + + +### New Features + +- Regenerate the client lib to pick new mtls env (via synth). ([#197](https://github.com/googleapis/python-pubsub/pull/197)) + + +### Documentation + +- Add subscription detachment sample. ([#152](https://github.com/googleapis/python-pubsub/pull/152)) +- Use new call syntax in subscriber docs. ([#198](https://github.com/googleapis/python-pubsub/issues/198)) ([#203](https://github.com/googleapis/python-pubsub/pull/203)) + + +### Internal / Testing Changes + +- Update CODEOWNERS. ([#193](https://github.com/googleapis/python-pubsub/pull/193)) + +## 2.0.0 + +09-11-2020 05:03 PDT + + +### Implementation Changes + +- Transition the library to microgenerator. ([#158](https://github.com/googleapis/python-pubsub/pull/158)) + This is a **breaking change** that introduces several **method signature changes** and **drops support + for Python 2.7 and 3.5**. + +### Documentation + +- Add samples for using ordering keys. ([#156](https://github.com/googleapis/python-pubsub/pull/156)) +- Remove extra white space in delivery attempt sample. ([#159](https://github.com/googleapis/python-pubsub/pull/159)) + +### Internal / Testing Changes + +- Fix flaky sequencer unit tests. ([#187](https://github.com/googleapis/python-pubsub/pull/187)) + +## [1.7.0](https://www.github.com/googleapis/python-pubsub/compare/v1.6.1...v1.7.0) (2020-07-13) + +This is the last release that supports Python 2.7 and 3.5. + +### New Features + +- Add support for server-side flow control. ([#143](https://github.com/googleapis/python-pubsub/pull/143)) ([04e261c](https://www.github.com/googleapis/python-pubsub/commit/04e261c602a2919cc75b3efa3dab099fb2cf704c)) + +### Dependencies + +- Update samples dependency `google-cloud-pubsub` to `v1.6.1`. ([#144](https://github.com/googleapis/python-pubsub/pull/144)) ([1cb6746](https://github.com/googleapis/python-pubsub/commit/1cb6746b00ebb23dbf1663bae301b32c3fc65a88)) + +### Documentation + +- Add pubsub/cloud-client samples from the common samples repo (with commit history). ([#151](https://github.com/googleapis/python-pubsub/pull/151)) +- Add flow control section to publish overview. ([#129](https://github.com/googleapis/python-pubsub/pull/129)) ([acc19eb](https://www.github.com/googleapis/python-pubsub/commit/acc19eb048eef067d9818ef3e310b165d9c6307e)) +- Add a link to Pub/Sub filtering language public documentation to `pubsub.proto`. ([#121](https://github.com/googleapis/python-pubsub/pull/121)) ([8802d81](https://www.github.com/googleapis/python-pubsub/commit/8802d8126247f22e26057e68a42f5b5a82dcbf0d)) + + +## [1.6.1](https://www.github.com/googleapis/python-pubsub/compare/v1.6.0...v1.6.1) (2020-06-30) ### Documentation diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f6029..039f43681 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. 
+## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. 
Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7eb8028d5..417b1e9f8 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout: # Configure remotes such that you can pull changes from the googleapis/python-pubsub # repository into your local repository. 
$ git remote add upstream git@github.com:googleapis/python-pubsub.git - # fetch and merge changes from upstream into master + # fetch and merge changes from upstream into main $ git fetch upstream - $ git merge upstream/master + $ git merge upstream/main Now your local repo is set up such that you will push changes to your GitHub repo, from which you can submit a pull request. @@ -68,10 +68,12 @@ Using ``nox`` We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: + $ nox -s unit + +- To run a single unit test:: + + $ nox -s unit-3.13 -- -k - $ nox -s unit-2.7 - $ nox -s unit-3.7 - $ ... .. note:: @@ -80,25 +82,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** @@ -112,8 +95,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: -- PEP8 compliance, with exceptions defined in the linter configuration. + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -123,12 +110,22 @@ Coding Style variables:: export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="master" + export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date - version of ``python-pubsub``. The the suggested remote name ``upstream`` - should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``master``). + version of ``python-pubsub``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). + +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. 
code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit Exceptions to PEP8: @@ -142,34 +139,23 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 - $ nox -s system-2.7 + # Run all system tests + $ nox -s system + + # Run a single system test + $ nox -s system-3.12 -- -k + .. note:: - System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions - of Python 3. + System tests are only configured to run under Python 3.12. + For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage @@ -191,6 +177,30 @@ Build the docs via: $ nox -s docs +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. + +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** @@ -199,7 +209,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-pubsub/blob/master/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-pubsub/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-cloud-pubsub @@ -211,25 +221,32 @@ Supported Python Versions We support: -- `Python 3.5`_ -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ +- `Python 3.14`_ -.. _Python 3.5: https://docs.python.org/3.5/ -.. _Python 3.6: https://docs.python.org/3.6/ .. 
_Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ +.. _Python 3.14: https://docs.python.org/3.14/ Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-pubsub/blob/master/noxfile.py +.. _config: https://github.com/googleapis/python-pubsub/blob/main/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. -We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +We also explicitly decided to support Python 3 beginning with version 3.7. +Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/LICENSE b/LICENSE index a8ee855de..d64569567 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d120..dae249ec8 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,25 +1,20 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.py *.pyi *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ - -# Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file diff --git a/README.rst b/README.rst index a92a43087..97010e998 100644 --- a/README.rst +++ b/README.rst @@ -20,14 +20,14 @@ independently written applications. - `Client Library Documentation`_ .. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ .. _Product Documentation: https://cloud.google.com/pubsub/docs -.. _Client Library Documentation: https://googleapis.dev/python/pubsub/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/summary_overview Quick Start ----------- @@ -60,11 +60,13 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 +Python >= 3.7 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Python <= 3.6. + +The last version of this library compatible with Python 2.7 is google-cloud-pubsub==1.7.0. Mac/Linux @@ -108,12 +110,13 @@ messages to it project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. ) - publisher.create_topic(topic_name) - publisher.publish(topic_name, b'My first message!', spam='eggs') + publisher.create_topic(name=topic_name) + future = publisher.publish(topic_name, b'My first message!', spam='eggs') + future.result() To learn more, consult the `publishing documentation`_. -.. _publishing documentation: https://googleapis.dev/python/pubsub/latest +.. _publishing documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/google.cloud.pubsub_v1.publisher.client.Client Subscribing @@ -127,23 +130,24 @@ the topic, and subscribe to that, passing a callback function. import os from google.cloud import pubsub_v1 - subscriber = pubsub_v1.SubscriberClient() topic_name = 'projects/{project_id}/topics/{topic}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. ) + subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), sub='MY_SUBSCRIPTION_NAME', # Set this to something appropriate. ) - subscriber.create_subscription( - name=subscription_name, topic=topic_name) def callback(message): print(message.data) message.ack() - future = subscriber.subscribe(subscription_name, callback) + with pubsub_v1.SubscriberClient() as subscriber: + subscriber.create_subscription( + name=subscription_name, topic=topic_name) + future = subscriber.subscribe(subscription_name, callback) The future returned by the call to ``subscriber.subscribe`` can be used to block the current thread until a given condition obtains: @@ -158,7 +162,7 @@ block the current thread until a given condition obtains: It is also possible to pull messages in a synchronous (blocking) fashion. To learn more about subscribing, consult the `subscriber documentation`_. -.. _subscriber documentation: https://googleapis.dev/python/pubsub/latest +.. _subscriber documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/google.cloud.pubsub_v1.subscriber.client.Client Authentication @@ -210,20 +214,17 @@ Contributions to this library are always welcome and highly encouraged. See the `CONTRIBUTING doc`_ for more information on how to get started. -.. _CONTRIBUTING doc: https://github.com/googleapis/google-cloud-python/blob/master/CONTRIBUTING.rst +.. _CONTRIBUTING doc: https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst Community --------- -Google Cloud Platform Python developers hang out in `Slack`_ in the ``#python`` -channel, click here to `get an invitation`_. 
+The best place to ask questions is via Stack Overflow: https://stackoverflow.com/questions/tagged/google-cloud-pubsub

-.. _Slack: https://googlecloud-community.slack.com
-.. _get an invitation: https://gcp-slack.appspot.com/

License
-------

Apache 2.0 - See `the LICENSE`_ for more information.

-.. _the LICENSE: https://github.com/googleapis/google-cloud-python/blob/master/LICENSE
+.. _the LICENSE: https://github.com/googleapis/google-cloud-python/blob/main/LICENSE
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..8b58ae9c0
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
diff --git a/UPGRADING.md b/UPGRADING.md
new file mode 100644
index 000000000..83081c1ac
--- /dev/null
+++ b/UPGRADING.md
@@ -0,0 +1,201 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-pubsub` client is a significant upgrade based
+on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python),
+and includes substantial interface changes. Existing code written for earlier versions
+of this library will likely require updates to use this version. This document
+describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an
+[issue](https://github.com/googleapis/python-pubsub/issues).
+
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+
+The 2.0.0 release requires Python 3.6+.
+
+
+## Method Calls
+
+> **WARNING**: Breaking change
+
+Almost all methods that send requests to the backend expect request objects. We
+provide a script that will convert most common use cases.
+
+* Install the library with the `libcst` extra.
+
+```sh
+python3 -m pip install google-cloud-pubsub[libcst]
+```
+
+* The script `fixup_pubsub_v1_keywords.py` is shipped with the library. It expects
+an input directory (with the code to convert) and an empty destination directory.
+
+```sh
+$ scripts/fixup_pubsub_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+**Before:**
+```py
+from google.cloud import pubsub
+
+publisher = pubsub.PublisherClient()
+
+project_path = "projects/{}".format(PROJECT_ID)
+topics = publisher.list_topics(project_path)
+```
+
+
+**After:**
+```py
+from google.cloud import pubsub
+
+publisher = pubsub.PublisherClient()
+
+project_path = f"projects/{PROJECT_ID}"
+topics = publisher.list_topics(request={"project": project_path})
+```
+
+### More Details
+
+In `google-cloud-pubsub<2.0.0`, parameters required by the API were positional
+parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+    def list_topics(
+        self,
+        project,
+        page_size=None,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+In the 2.0.0 release, almost all methods that interact with the backend have a single
+positional parameter `request`. Method docstrings indicate whether a parameter is
+required or optional.
+
+> **NOTE:** The exceptions are hand-written methods such as `publisher.publish()` and
+> `subscriber.subscribe()` that implement additional logic (e.g. request batching) and
+> sit on top of the API methods from the generated parts of the library. The signatures
+> of these methods have in large part been preserved.
+
+Some methods have additional keyword-only parameters. The available parameters depend
+on the [`google.api.method_signature` annotation](https://github.com/googleapis/python-pubsub/blob/main/google/cloud/pubsub_v1/proto/pubsub.proto#L88)
+specified by the API producer.
+
+
+**After:**
+```py
+    def list_topics(
+        self,
+        request: pubsub.ListTopicsRequest = None,
+        *,
+        project: str = None,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: google.pubsub_v1.types.TimeoutType = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> pagers.ListTopicsPager:
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are
+> mutually exclusive. Passing both will result in an error.
+
+
+Both of these calls are valid:
+
+```py
+response = client.list_topics(
+    request={
+        "project": project_path,
+        "page_size": 10,
+    }
+)
+```
+
+```py
+response = client.list_topics(
+    project=project_path,
+    metadata=[("foo", "bar"), ("baz", "quux")],
+)
+```
+
+This call is invalid because it mixes the `request` object with the flattened keyword
+argument `project`. Executing this code will result in an error:
+
+```py
+response = client.list_topics(
+    request={"project": project_path},
+    project=project_path,
+)
+```
+
+> **NOTE:** The `request` parameter of some methods can also contain a richer set of
+> options (such as `page_size` above) that are otherwise not available as explicit
+> keyword-only parameters; these _must_ be passed through `request`.
+
+
+## Removed Utility Methods
+
+> **WARNING**: Breaking change
+
+Some utility methods, such as the publisher client's `subscription_path()`, have been
+removed and now only exist on the relevant client, e.g. `subscriber.subscription_path()`.
+
+The `project_path()` method has been removed from both the publisher and subscriber
+clients; this path must now be constructed manually:
+```py
+project_path = f"projects/{PROJECT_ID}"
+```
+
+## Removed `client_config` Parameter
+
+The publisher and subscriber clients can no longer be constructed with a
+`client_config` argument. If you want to customize retry and timeout settings for a
+particular method, you need to do it upon method invocation by passing custom
+`retry` and `timeout` arguments, as in the sketch below.
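+
+A minimal sketch of such a per-call override (it assumes a `subscriber` client and a
+`subscription_path` are already defined; `retry`, `timeout`, and `metadata` are the
+standard per-call options on the generated methods):
+
+```py
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry as retries
+
+# Retry only on UNAVAILABLE, and only for this pull() call.
+pull_retry = retries.Retry(
+    initial=0.1,  # initial delay before the first retry, in seconds
+    maximum=10.0,  # upper bound on the delay between retries
+    multiplier=1.3,  # factor by which the delay grows
+    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
+)
+
+response = subscriber.pull(
+    request={"subscription": subscription_path, "max_messages": 10},
+    retry=pull_retry,
+    timeout=30.0,  # per-call timeout, in seconds
+)
+```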
+ + +## Custom Retry and Timeout settings for Publisher Client + +The ``publisher_options`` parameter to the Publisher Client, as well as all of the +client's methods, now accept custom retry and timeout settings: + +```py +custom_retry = api_core.retry.Retry( + initial=0.250, # seconds (default: 0.1) + maximum=90.0, # seconds (default: 60.0) + multiplier=1.45, # default: 1.3 + deadline=300.0, # seconds (default: 60.0) + predicate=api_core.retry.if_exception_type( + api_core.exceptions.Aborted, + api_core.exceptions.DeadlineExceeded, + api_core.exceptions.InternalServerError, + api_core.exceptions.ResourceExhausted, + api_core.exceptions.ServiceUnavailable, + api_core.exceptions.Unknown, + api_core.exceptions.Cancelled, + ), +) + +custom_timeout=api_core.timeout.ExponentialTimeout( + initial=1.0, + maximum=10.0, + multiplier=1.0, + deadline=300.0, +) + +publisher = pubsub_v1.PublisherClient( + publisher_options = pubsub_v1.types.PublisherOptions( + retry=custom_retry, + timeout=custom_timeout, + ), +) +``` + +The timeout can be either an instance of `google.api_core.timeout.ConstantTimeout`, +or an instance of `google.api_core.timeout.ExponentialTimeout`, as in the example. diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md new file mode 120000 index 000000000..01097c8c0 --- /dev/null +++ b/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf229f..b0a295464 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/docs/conf.py b/docs/conf.py index fb9f1ca32..44d92cca7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-pubsub documentation build configuration file # @@ -20,12 +33,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -35,6 +52,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -58,13 +76,13 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. -project = u"google-cloud-pubsub" -copyright = u"2019, Google" -author = u"Google APIs" +project = "google-cloud-pubsub" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -90,7 +108,13 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "**/.nox/**/*", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -256,9 +280,9 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-pubsub.tex", - u"google-cloud-pubsub Documentation", + "google-cloud-pubsub Documentation", author, "manual", ) @@ -291,9 +315,9 @@ # (source start file, name, description, authors, manual section). man_pages = [ ( - master_doc, + root_doc, "google-cloud-pubsub", - u"google-cloud-pubsub Documentation", + "google-cloud-pubsub Documentation", [author], 1, ) @@ -310,9 +334,9 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-pubsub", - u"google-cloud-pubsub Documentation", + "google-cloud-pubsub Documentation", author, "google-cloud-pubsub", "google-cloud-pubsub Library", @@ -335,10 +359,15 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), - "grpc": ("https://grpc.io/grpc/python/", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/docs/index.rst b/docs/index.rst index ae3053625..daba0c7b3 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,15 +2,34 @@ .. include:: multiprocessing.rst + API Documentation ----------------- +.. note:: + + The client library version (currently ``2.x``) should not be confused with the + backend API version (currently ``v1``), hence some references to ``v1`` can be found + across the documentation. + .. toctree:: - :maxdepth: 3 + :maxdepth: 4 + + Publisher Client + Subscriber Client + Types + + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + +.. 
toctree:: + :maxdepth: 2 + + UPGRADING - publisher/index - subscriber/index - types Changelog --------- @@ -22,3 +41,8 @@ For a list of all ``google-cloud-pubsub`` releases: changelog + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst index 1cb29d4ca..536d17b2e 100644 --- a/docs/multiprocessing.rst +++ b/docs/multiprocessing.rst @@ -1,7 +1,7 @@ .. note:: - Because this client uses :mod:`grpcio` library, it is safe to + Because this client uses :mod:`grpc` library, it is safe to share instances across threads. In multiprocessing scenarios, the best practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.Pool` or + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or :class:`multiprocessing.Process`. diff --git a/docs/publisher/api/client.rst b/docs/pubsub/publisher/api/client.rst similarity index 63% rename from docs/publisher/api/client.rst rename to docs/pubsub/publisher/api/client.rst index 47a3aa3d5..d1a54ff5e 100644 --- a/docs/publisher/api/client.rst +++ b/docs/pubsub/publisher/api/client.rst @@ -1,5 +1,5 @@ -Publisher Client API -==================== +Publisher Client API (v1) +========================= .. automodule:: google.cloud.pubsub_v1.publisher.client :members: diff --git a/docs/publisher/api/futures.rst b/docs/pubsub/publisher/api/futures.rst similarity index 100% rename from docs/publisher/api/futures.rst rename to docs/pubsub/publisher/api/futures.rst diff --git a/docs/pubsub/publisher/api/pagers.rst b/docs/pubsub/publisher/api/pagers.rst new file mode 100644 index 000000000..3bbfff33c --- /dev/null +++ b/docs/pubsub/publisher/api/pagers.rst @@ -0,0 +1,6 @@ +Pagers +====== + +.. automodule:: google.pubsub_v1.services.publisher.pagers + :members: + :inherited-members: diff --git a/docs/publisher/index.rst b/docs/pubsub/publisher/index.rst similarity index 61% rename from docs/publisher/index.rst rename to docs/pubsub/publisher/index.rst index 2a785359c..2a0ad320e 100644 --- a/docs/publisher/index.rst +++ b/docs/pubsub/publisher/index.rst @@ -33,7 +33,7 @@ Therefore, a very basic publishing call looks like: .. code-block:: python topic = 'projects/{project}/topics/{topic}' - publish_client.publish(topic, b'This is my message.') + future = publish_client.publish(topic, b'This is my message.') .. note:: @@ -52,7 +52,7 @@ If you want to include attributes, simply add keyword arguments: .. code-block:: python topic = 'projects/{project}/topics/{topic}' - publish_client.publish(topic, b'This is my message.', foo='bar') + future = publish_client.publish(topic, b'This is my message.', foo='bar') Batching @@ -72,7 +72,7 @@ The way that this works is that on the first message that you send, a new batch is created automatically. For every subsequent message, if there is already a valid batch that is still accepting messages, then that batch is used. When the batch is created, it begins a countdown that publishes the batch once -sufficient time has elapsed (by default, this is 0.05 seconds). +sufficient time has elapsed (by default, this is 0.01 seconds). 
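+
+For example (a minimal sketch; it assumes the ``topic`` path and ``publish_client``
+from the snippets above), messages published in quick succession share a batch
+behind the scenes, and each call immediately returns its own future:
+
+.. code-block:: python
+
+    futures = [
+        publish_client.publish(topic, b'Message %d' % i)
+        for i in range(10)
+    ]
+
+    # Block until delivery; each result is the server-assigned message ID.
+    message_ids = [future.result() for future in futures]
+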
If you need different batching settings, simply provide a
:class:`~.pubsub_v1.types.BatchSettings` object when you instantiate the
@@ -84,11 +84,23 @@ If you need different batching settings, simply provide a
 .. code-block:: python
 
     from google.cloud.pubsub import types
 
     client = pubsub.PublisherClient(
-        batch_settings=types.BatchSettings(max_messages=500),
+        batch_settings=types.BatchSettings(
+            max_messages=500,  # default 100
+            max_bytes=1024,  # default 1 MB
+            max_latency=1  # default 0.01 seconds
+        ),
     )
 
-Pub/Sub accepts a maximum of 1,000 messages in a batch, and the size of a
-batch can not exceed 10 megabytes.
+The `max_bytes` argument is the maximum total size, in bytes, of the messages to
+collect before automatically publishing the batch, including any byte-size
+overhead of the publish request itself. The maximum value is bound by the
+server-side limit of 10_000_000 bytes. The default value is 1 MB.
+
+The `max_messages` argument is the maximum number of messages to collect before
+automatically publishing the batch; the default value is 100 messages.
+
+The `max_latency` argument is the maximum number of seconds to wait for
+additional messages before automatically publishing the batch; the default is
+0.01 seconds.
 
 
 Futures
@@ -128,6 +140,42 @@ You can also attach a callback to the future:
     future.add_done_callback(callback)
 
 
+Publish Flow Control
+--------------------
+
+When publishing a large number of messages, or very large messages, in quick
+succession, some of the publish requests might time out, especially if the
+available bandwidth is limited. To mitigate this, the client can be configured
+with custom :class:`~.pubsub_v1.types.PublishFlowControl` settings.
+
+You can configure the maximum desired number of messages and their maximum total
+size, as well as the action that should be taken when the threshold is reached.
+
+.. code-block:: python
+
+    from google.cloud import pubsub_v1
+
+    client = pubsub_v1.PublisherClient(
+        publisher_options=pubsub_v1.types.PublisherOptions(
+            flow_control=pubsub_v1.types.PublishFlowControl(
+                message_limit=500,
+                byte_limit=2 * 1024 * 1024,
+                limit_exceeded_behavior=pubsub_v1.types.LimitExceededBehavior.BLOCK,
+            ),
+        ),
+    )
+
+The action to be taken on overflow can be one of the following:
+
+* :attr:`~.pubsub_v1.types.LimitExceededBehavior.IGNORE` (default): Ignore the
+  overflow and continue publishing the messages as normal.
+* :attr:`~.pubsub_v1.types.LimitExceededBehavior.ERROR`: Raise
+  :exc:`~.pubsub_v1.publisher.exceptions.FlowControlLimitError` and reject the message.
+* :attr:`~.pubsub_v1.types.LimitExceededBehavior.BLOCK`: Temporarily block in the
+  :meth:`~.pubsub_v1.publisher.client.Client.publish` method until there is
+  enough capacity available.
+
+
 API Reference
 -------------
 
@@ -136,3 +184,4 @@ API Reference
 
    api/client
    api/futures
+   api/pagers
diff --git a/docs/subscriber/api/client.rst b/docs/pubsub/subscriber/api/client.rst
similarity index 63%
rename from docs/subscriber/api/client.rst
rename to docs/pubsub/subscriber/api/client.rst
index 965880c5a..d26243eba 100644
--- a/docs/subscriber/api/client.rst
+++ b/docs/pubsub/subscriber/api/client.rst
@@ -1,5 +1,5 @@
-Subscriber Client API
-=====================
+Subscriber Client API (v1)
+==========================

..
automodule:: google.cloud.pubsub_v1.subscriber.client :members: diff --git a/docs/subscriber/api/futures.rst b/docs/pubsub/subscriber/api/futures.rst similarity index 100% rename from docs/subscriber/api/futures.rst rename to docs/pubsub/subscriber/api/futures.rst diff --git a/docs/subscriber/api/message.rst b/docs/pubsub/subscriber/api/message.rst similarity index 100% rename from docs/subscriber/api/message.rst rename to docs/pubsub/subscriber/api/message.rst diff --git a/docs/pubsub/subscriber/api/pagers.rst b/docs/pubsub/subscriber/api/pagers.rst new file mode 100644 index 000000000..367c65ca7 --- /dev/null +++ b/docs/pubsub/subscriber/api/pagers.rst @@ -0,0 +1,6 @@ +Pagers +====== + +.. automodule:: google.pubsub_v1.services.subscriber.pagers + :members: + :inherited-members: diff --git a/docs/subscriber/api/scheduler.rst b/docs/pubsub/subscriber/api/scheduler.rst similarity index 100% rename from docs/subscriber/api/scheduler.rst rename to docs/pubsub/subscriber/api/scheduler.rst diff --git a/docs/subscriber/index.rst b/docs/pubsub/subscriber/index.rst similarity index 82% rename from docs/subscriber/index.rst rename to docs/pubsub/subscriber/index.rst index ed99566cd..aa21cd37b 100644 --- a/docs/subscriber/index.rst +++ b/docs/pubsub/subscriber/index.rst @@ -12,8 +12,9 @@ Instantiating a subscriber client is straightforward: .. code-block:: python from google.cloud import pubsub - subscriber = pubsub.SubscriberClient() + with pubsub.SubscriberClient() as subscriber: + # ... Creating a Subscription ----------------------- @@ -36,9 +37,15 @@ to subscribe to, and it must already exist. Once you have that, it is easy: # Substitute PROJECT, SUBSCRIPTION, and TOPIC with appropriate values for # your application. - sub_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) - topic_path = subscriber.topic_path(PROJECT, TOPIC) - subscriber.create_subscription(sub_path, topic_path) + + # from google.cloud import pubsub + # publisher = pubsub.PublisherClient() + + topic_path = publisher.topic_path(PROJECT, TOPIC) + + with pubsub.SubscriberClient() as subscriber: + sub_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) + subscriber.create_subscription(request={"name": sub_path, "topic": topic_path}) Once you have created a subscription (or if you already had one), the next step is to pull data from it. @@ -52,16 +59,28 @@ To pull the messages synchronously, use the client's .. code-block:: python + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your # application. subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) - response = subscriber.pull(subscription_path, max_messages=5) + response = subscriber.pull( + request={ + "subscription": subscription_path, + "max_messages": 5, + } + ) for msg in response.received_messages: print("Received message:", msg.message.data) ack_ids = [msg.ack_id for msg in response.received_messages] - subscriber.acknowledge(subscription_path, ack_ids) + subscriber.acknowledge( + request={ + "subscription": subscription_path, + "ack_ids": ack_ids, + } + ) The method returns a :class:`~.pubsub_v1.types.PullResponse` instance that contains a list of received :class:`~.pubsub_v1.types.ReceivedMessage` @@ -74,9 +93,17 @@ be dropped by this client and the backend will try to re-deliver them. .. 
code-block:: python + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + ack_ids = [] # TODO: populate with `ack_ids` of the messages to NACK ack_deadline_seconds = 0 - subscriber.modify_ack_deadline(subscription_path, ack_ids, ack_deadline_seconds) + subscriber.modify_ack_deadline( + request={ + "subscription": subscription_path, + "ack_ids": ack_ids, + "ack_deadline_seconds": ack_deadline_seconds, + } + ) Pulling a Subscription Asynchronously @@ -89,6 +116,8 @@ each message received. .. code-block:: python + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your # application. subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) @@ -127,6 +156,8 @@ Here is an example: do_something_with(message) # Replace this with your actual logic. message.ack() # Asynchronously acknowledge the message. + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your # application. subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) @@ -157,7 +188,8 @@ thread will be set on the future. try: future.result() except Exception as ex: - subscription.close() + # Close the subscriber if not using a context manager. + subscriber.close() raise Finally, you can use @@ -200,4 +232,5 @@ API Reference api/client api/message api/futures + api/pagers api/scheduler diff --git a/docs/types.rst b/docs/pubsub/types.rst similarity index 100% rename from docs/types.rst rename to docs/pubsub/types.rst diff --git a/docs/summary_overview.md b/docs/summary_overview.md new file mode 100644 index 000000000..171339711 --- /dev/null +++ b/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Google Cloud Pub/Sub API + +Overview of the APIs available for Google Cloud Pub/Sub API. + +## All entries + +Classes, methods and properties & attributes for +Google Cloud Pub/Sub API. 
+ +[classes](https://cloud.google.com/python/docs/reference/pubsub/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/pubsub/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/pubsub/latest/summary_property.html) diff --git a/google/cloud/pubsub.py b/google/cloud/pubsub/__init__.py similarity index 91% rename from google/cloud/pubsub.py rename to google/cloud/pubsub/__init__.py index 3dc5fea84..2a6994231 100644 --- a/google/cloud/pubsub.py +++ b/google/cloud/pubsub/__init__.py @@ -19,6 +19,7 @@ from google.cloud.pubsub_v1 import PublisherClient from google.cloud.pubsub_v1 import SubscriberClient +from google.cloud.pubsub_v1 import SchemaServiceClient from google.cloud.pubsub_v1 import types @@ -26,4 +27,5 @@ "types", "PublisherClient", "SubscriberClient", + "SchemaServiceClient", ) diff --git a/google/cloud/pubsub_v1/__init__.py b/google/cloud/pubsub_v1/__init__.py index 67bec51b2..99bc3e9c8 100644 --- a/google/cloud/pubsub_v1/__init__.py +++ b/google/cloud/pubsub_v1/__init__.py @@ -17,6 +17,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import subscriber +from google.pubsub_v1.services import schema_service class PublisherClient(publisher.Client): @@ -27,4 +28,8 @@ class SubscriberClient(subscriber.Client): __doc__ = subscriber.Client.__doc__ -__all__ = ("types", "PublisherClient", "SubscriberClient") +class SchemaServiceClient(schema_service.client.SchemaServiceClient): + __doc__ = schema_service.client.SchemaServiceClient.__doc__ + + +__all__ = ("types", "PublisherClient", "SubscriberClient", "SchemaServiceClient") diff --git a/google/cloud/pubsub_v1/_gapic.py b/google/cloud/pubsub_v1/_gapic.py deleted file mode 100644 index da755dfbc..000000000 --- a/google/cloud/pubsub_v1/_gapic.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2019, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import functools - - -def add_methods(source_class, blacklist=()): - """Add wrapped versions of the `api` member's methods to the class. - - Any methods passed in `blacklist` are not added. - Additionally, any methods explicitly defined on the wrapped class are - not added. - """ - - def wrap(wrapped_fx, lookup_fx): - """Wrap a GAPIC method; preserve its name and docstring.""" - # If this is a static or class method, then we do *not* - # send self as the first argument. - # - # For instance methods, we need to send self.api rather - # than self, since that is where the actual methods were declared. 
- - if isinstance(lookup_fx, (classmethod, staticmethod)): - fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa - return staticmethod(functools.wraps(wrapped_fx)(fx)) - else: - fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa - return functools.wraps(wrapped_fx)(fx) - - def actual_decorator(cls): - # Reflectively iterate over most of the methods on the source class - # (the GAPIC) and make wrapped versions available on this client. - for name in dir(source_class): - # Ignore all private and magic methods. - if name.startswith("_"): - continue - - # Ignore anything on our blacklist. - if name in blacklist: - continue - - # Retrieve the attribute, and ignore it if it is not callable. - attr = getattr(source_class, name) - if not callable(attr): - continue - - # Add a wrapper method to this object. - lookup_fx = source_class.__dict__[name] - fx = wrap(attr, lookup_fx) - - setattr(cls, name, fx) - - # Return the augmented class. - return cls - - # Simply return the actual decorator; this is returned from this method - # and actually used to decorate the class. - return actual_decorator diff --git a/google/cloud/pubsub_v1/futures.py b/google/cloud/pubsub_v1/futures.py index ba861e40c..5527d21d0 100644 --- a/google/cloud/pubsub_v1/futures.py +++ b/google/cloud/pubsub_v1/futures.py @@ -14,14 +14,13 @@ from __future__ import absolute_import -import threading -import uuid +import concurrent.futures +from typing import Any, NoReturn, Optional import google.api_core.future -from google.cloud.pubsub_v1.publisher import exceptions -class Future(google.api_core.future.Future): +class Future(concurrent.futures.Future, google.api_core.future.Future): """Encapsulation of the asynchronous execution of an action. This object is returned from asychronous Pub/Sub calls, and is the @@ -29,158 +28,29 @@ class Future(google.api_core.future.Future): This object should not be created directly, but is returned by other methods in this library. - - Args: - completed (Optional[Any]): An event, with the same interface as - :class:`threading.Event`. This is provided so that callers - with different concurrency models (e.g. ``threading`` or - ``multiprocessing``) can supply an event that is compatible - with that model. The ``wait()`` and ``set()`` methods will be - used. If this argument is not provided, then a new - :class:`threading.Event` will be created and used. """ - # This could be a sentinel object or None, but the sentinel object's ID - # can change if the process is forked, and None has the possibility of - # actually being a result. - _SENTINEL = uuid.uuid4() - - def __init__(self, completed=None): - self._result = self._SENTINEL - self._exception = self._SENTINEL - self._callbacks = [] - if completed is None: - completed = threading.Event() - self._completed = completed - - def cancel(self): - """Actions in Pub/Sub generally may not be canceled. - - This method always returns False. - """ - return False - - def cancelled(self): - """Actions in Pub/Sub generally may not be canceled. - - This method always returns False. - """ - return False - - def running(self): - """Actions in Pub/Sub generally may not be canceled. - - Returns: - bool: ``True`` if this method has not yet completed, or - ``False`` if it has completed. - """ + def running(self) -> bool: + """Return ``True`` if the associated Pub/Sub action has not yet completed.""" return not self.done() - def done(self): - """Return True the future is done, False otherwise. 
- - This still returns True in failure cases; checking :meth:`result` or - :meth:`exception` is the canonical way to assess success or failure. - """ - return self._exception != self._SENTINEL or self._result != self._SENTINEL - - def result(self, timeout=None): - """Resolve the future and return a value where appropriate. + def set_running_or_notify_cancel(self) -> NoReturn: + raise NotImplementedError( + "Only used by executors from `concurrent.futures` package." + ) - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. + def set_result(self, result: Any): + """Set the return value of work associated with the future. - Raises: - concurrent.futures.TimeoutError: If the request times out. - Exception: For undefined exceptions in the underlying - call execution. + Do not use this method, it should only be used internally by the library and its + unit tests. """ - # Attempt to get the exception if there is one. - # If there is not one, then we know everything worked, and we can - # return an appropriate value. - err = self.exception(timeout=timeout) - if err is None: - return self._result - raise err - - def exception(self, timeout=None): - """Return the exception raised by the call, if any. - - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. - - Raises: - concurrent.futures.TimeoutError: If the request times out. - - Returns: - Exception: The exception raised by the call, if any. - """ - # Wait until the future is done. - if not self._completed.wait(timeout=timeout): - raise exceptions.TimeoutError("Timed out waiting for result.") - - # If the batch completed successfully, this should return None. - if self._result != self._SENTINEL: - return None - - # Okay, this batch had an error; this should return it. - return self._exception - - def add_done_callback(self, callback): - """Attach the provided callable to the future. - - The provided function is called, with this future as its only argument, - when the future finishes running. - - Args: - callback (Callable): The function to call. - - Returns: - None - """ - if self.done(): - return callback(self) - self._callbacks.append(callback) - - def set_result(self, result): - """Set the result of the future to the provided result. - - Args: - result (Any): The result - """ - # Sanity check: A future can only complete once. - if self.done(): - raise RuntimeError("set_result can only be called once.") - - # Set the result and trigger the future. - self._result = result - self._trigger() - - def set_exception(self, exception): - """Set the result of the future to the given exception. - - Args: - exception (:exc:`Exception`): The exception raised. - """ - # Sanity check: A future can only complete once. - if self.done(): - raise RuntimeError("set_exception can only be called once.") - - # Set the exception and trigger the future. - self._exception = exception - self._trigger() - - def _trigger(self): - """Trigger all callbacks registered to this Future. + return super().set_result(result=result) - This method is called internally by the batch once the batch - completes. + def set_exception(self, exception: Optional[BaseException]): + """Set the result of the future as being the given exception. - Args: - message_id (str): The message ID, as a string. + Do not use this method, it should only be used internally by the library and its + unit tests. 
""" - self._completed.set() - for callback in self._callbacks: - callback(self) + return super().set_exception(exception=exception) diff --git a/google/cloud/pubsub_v1/gapic/publisher_client.py b/google/cloud/pubsub_v1/gapic/publisher_client.py deleted file mode 100644 index e8853d841..000000000 --- a/google/cloud/pubsub_v1/gapic/publisher_client.py +++ /dev/null @@ -1,1292 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.pubsub.v1 Publisher API.""" - -import collections -from copy import deepcopy -import functools -import pkg_resources -import six -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.path_template -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.pubsub_v1.gapic import publisher_client_config -from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import iam_policy_pb2_grpc -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version - - -# TODO: remove conditional import after Python 2 support is dropped -if six.PY2: - from collections import Mapping -else: - from collections.abc import Mapping - - -def _merge_dict(d1, d2): - # Modifies d1 in-place to take values from d2 - # if the nested keys from d2 are present in d1. - # https://stackoverflow.com/a/10704003/4488789 - for k, v2 in d2.items(): - v1 = d1.get(k) # returns None if v1 has no such key - if v1 is None: - raise Exception("{} is not recognized by client_config".format(k)) - if isinstance(v1, Mapping) and isinstance(v2, Mapping): - _merge_dict(v1, v2) - else: - d1[k] = v2 - return d1 - - -class PublisherClient(object): - """ - The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - SERVICE_ADDRESS = "pubsub.googleapis.com:443" - """The default address of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. 
- _INTERFACE_NAME = "google.pubsub.v1.Publisher" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PublisherClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def subscription_path(cls, project, subscription): - """Return a fully-qualified subscription string.""" - return google.api_core.path_template.expand( - "projects/{project}/subscriptions/{subscription}", - project=project, - subscription=subscription, - ) - - @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" - return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.PublisherGrpcTransport, - Callable[[~.Credentials, type], ~.PublisherGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. 
- """ - default_client_config = deepcopy(publisher_client_config.config) - - if client_config is None: - client_config = default_client_config - else: - client_config = _merge_dict(default_client_config, client_config) - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=publisher_grpc_transport.PublisherGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = publisher_grpc_transport.PublisherGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_topic( - self, - name, - labels=None, - message_storage_policy=None, - kms_key_name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates the given topic with the given name. See the resource name - rules. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> name = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.create_topic(name) - - Args: - name (str): Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must start with a - letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), - dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be between 3 and 255 - characters in length, and it must not start with ``"goog"``. - labels (dict[str -> str]): See Creating and - managing labels. - message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining the set of Google Cloud Platform regions where messages - published to the topic may be stored. If not present, then no constraints - are in effect. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy` - kms_key_name (str): The resource name of the Cloud KMS CryptoKey to be used to protect - access to messages published on this topic. - - The expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "create_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_topic, - default_retry=self._method_configs["CreateTopic"].retry, - default_timeout=self._method_configs["CreateTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.Topic( - name=name, - labels=labels, - message_storage_policy=message_storage_policy, - kms_key_name=kms_key_name, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_topic( - self, - topic, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing topic. Note that certain properties of a - topic are not modifiable. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic_name = 'projects/my-project/topics/my-topic' - >>> topic_labels = {'source': 'external'} - >>> topic = {'name': topic_name, 'labels': topic_labels} - >>> - >>> paths_element = 'labels' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_topic(topic, update_mask) - - Args: - topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): Required. The updated topic object. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Topic` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided topic to update. - Must be specified and non-empty. Note that if ``update_mask`` contains - "message_storage_policy" but the ``message_storage_policy`` is not set - in the ``topic`` provided above, then the updated value is determined by - the policy configured at the project or organization level. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "update_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_topic, - default_retry=self._method_configs["UpdateTopic"].retry, - default_timeout=self._method_configs["UpdateTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateTopicRequest(topic=topic, update_mask=update_mask,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic.name", topic.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def publish( - self, - topic, - messages, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the - topic does not exist. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> data = b'' - >>> messages_element = {'data': data} - >>> messages = [messages_element] - >>> - >>> response = client.publish(topic, messages) - - Args: - topic (str): Required. The messages in the request will be published on this - topic. Format is ``projects/{project}/topics/{topic}``. - messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): Required. The messages to publish. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PubsubMessage` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.PublishResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "publish" not in self._inner_api_calls: - self._inner_api_calls[ - "publish" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.publish, - default_retry=self._method_configs["Publish"].retry, - default_timeout=self._method_configs["Publish"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.PublishRequest(topic=topic, messages=messages,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["publish"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_topic( - self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the configuration of a topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.get_topic(topic) - - Args: - topic (str): Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "get_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_topic, - default_retry=self._method_configs["GetTopic"].retry, - default_timeout=self._method_configs["GetTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetTopicRequest(topic=topic,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_topics( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists matching topics. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topics(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topics(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list topics. Format is - ``projects/{project-id}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_topics" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topics" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topics, - default_retry=self._method_configs["ListTopics"].retry, - default_timeout=self._method_configs["ListTopics"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicsRequest(project=project, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topics"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="topics", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_topic_subscriptions( - self, - topic, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the names of the attached subscriptions on this topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topic_subscriptions(topic): - ... # process element - ... 
pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topic_subscriptions(topic).pages: - ... for element in page: - ... # process element - ... pass - - Args: - topic (str): Required. The name of the topic that subscriptions are attached to. - Format is ``projects/{project}/topics/{topic}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_topic_subscriptions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topic_subscriptions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topic_subscriptions, - default_retry=self._method_configs["ListTopicSubscriptions"].retry, - default_timeout=self._method_configs["ListTopicSubscriptions"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topic_subscriptions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="subscriptions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_topic_snapshots( - self, - topic, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topic_snapshots(topic): - ... 
# process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topic_snapshots(topic).pages: - ... for element in page: - ... # process element - ... pass - - Args: - topic (str): Required. The name of the topic that snapshots are attached to. - Format is ``projects/{project}/topics/{topic}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_topic_snapshots" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topic_snapshots" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topic_snapshots, - default_retry=self._method_configs["ListTopicSnapshots"].retry, - default_timeout=self._method_configs["ListTopicSnapshots"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicSnapshotsRequest( - topic=topic, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topic_snapshots"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="snapshots", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_topic( - self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the - topic does not exist. After a topic is deleted, a new topic may be - created with the same name; this is an entirely new topic with none of - the old configuration or subscriptions. Existing subscriptions to this - topic are not deleted, but their ``topic`` field is set to - ``_deleted-topic_``. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> client.delete_topic(topic) - - Args: - topic (str): Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_topic, - default_retry=self._method_configs["DeleteTopic"].retry, - default_timeout=self._method_configs["DeleteTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteTopicRequest(topic=topic,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
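# [Illustrative sketch — editorial annotation, not part of the deleted file.]
# Every RPC in this client lazily wraps its transport method once and caches
# the wrapper in self._inner_api_calls, so retry/timeout/user-agent handling
# is attached a single time per process. The pattern reduces to a plain
# memoized factory (all names below are hypothetical):
def _wrapped_call(cache, name, raw_method, wrap):
    """Return a cached wrapper for ``raw_method``, creating it on first use."""
    if name not in cache:
        cache[name] = wrap(raw_method)  # wrap() adds retry/timeout once
    return cache[name]

calls = {}
send = _wrapped_call(calls, "get_iam_policy", print, lambda f: f)
# Subsequent lookups reuse the cached wrapper instead of re-wrapping.
assert _wrapped_call(calls, "get_iam_policy", print, lambda f: f) is send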
- if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def detach_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Detaches a subscription from this topic. All messages retained in - the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.detach_subscription(subscription) - - Args: - subscription (str): Required. The subscription to detach. Format is - ``projects/{project}/subscriptions/{subscription}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.DetachSubscriptionResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "detach_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "detach_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.detach_subscription, - default_retry=self._method_configs["DetachSubscription"].retry, - default_timeout=self._method_configs["DetachSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DetachSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["detach_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/google/cloud/pubsub_v1/gapic/publisher_client_config.py deleted file mode 100644 index 8c96fd10b..000000000 --- a/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ /dev/null @@ -1,111 +0,0 @@ -config = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_codes": { - "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], - "non_idempotent2": [], - "non_idempotent": ["UNAVAILABLE"], - "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "none": [], - "publish": [ - "ABORTED", - "CANCELLED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - "UNKNOWN", - ], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 5000, - "rpc_timeout_multiplier": 1.3, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 60000, - }, - }, - "methods": { - "CreateTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Publish": { - "timeout_millis": 20000, - "retry_codes_name": "publish", - "retry_params_name": "messaging", - "bundling": { - "element_count_threshold": 100, - "element_count_limit": 1000, - "request_byte_threshold": 1048576, - "request_byte_limit": 10485760, - "delay_threshold_millis": 10, - }, - }, - "GetTopic": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopics": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopicSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopicSnapshots": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent2", - "retry_params_name": "default", - }, - "DeleteTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, 
- "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DetachSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent2", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/pubsub_v1/gapic/subscriber_client.py b/google/cloud/pubsub_v1/gapic/subscriber_client.py deleted file mode 100644 index 1e24ba02a..000000000 --- a/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ /dev/null @@ -1,2006 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.pubsub.v1 Subscriber API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.pubsub_v1.gapic import subscriber_client_config -from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import iam_policy_pb2_grpc -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version - - -class SubscriberClient(object): - """ - The service that an application uses to manipulate subscriptions and - to consume messages from a subscription via the ``Pull`` method or by - establishing a bi-directional stream using the ``StreamingPull`` method. - """ - - SERVICE_ADDRESS = "pubsub.googleapis.com:443" - """The default address of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.pubsub.v1.Subscriber" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. 
- - Returns: - SubscriberClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def snapshot_path(cls, project, snapshot): - """Return a fully-qualified snapshot string.""" - return google.api_core.path_template.expand( - "projects/{project}/snapshots/{snapshot}", - project=project, - snapshot=snapshot, - ) - - @classmethod - def subscription_path(cls, project, subscription): - """Return a fully-qualified subscription string.""" - return google.api_core.path_template.expand( - "projects/{project}/subscriptions/{subscription}", - project=project, - subscription=subscription, - ) - - @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" - return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.SubscriberGrpcTransport, - Callable[[~.Credentials, type], ~.SubscriberGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
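# [Illustrative sketch — editorial annotation, not part of the deleted file.]
# The deprecation warnings below pass ``stacklevel=2`` so the warning is
# attributed to the caller's line rather than to this constructor. The idiom
# in isolation (function name is hypothetical):
import warnings

def make_client(client_config=None):
    if client_config is not None:
        warnings.warn(
            "The `client_config` argument is deprecated.",
            PendingDeprecationWarning,
            stacklevel=2,  # point at the code that passed client_config
        )

make_client(client_config={})  # issues the warning against this call site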
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = subscriber_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=subscriber_grpc_transport.SubscriberGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = subscriber_grpc_transport.SubscriberGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_subscription( - self, - name, - topic, - push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - labels=None, - enable_message_ordering=None, - expiration_policy=None, - filter_=None, - dead_letter_policy=None, - retry_policy=None, - detached=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a subscription to a given topic. See the resource name - rules. If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a - random name for this subscription on the same project as the topic, - conforming to the `resource name - format `__. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.create_subscription(name, topic) - - Args: - name (str): Required. The name of the subscription. It must have the format - ``"projects/{project}/subscriptions/{subscription}"``. 
- ``{subscription}`` must start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores - (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs - (``%``). It must be between 3 and 255 characters in length, and it must - not start with ``"goog"``. - topic (str): Required. The name of the topic from which this subscription is - receiving messages. Format is ``projects/{project}/topics/{topic}``. The - value of this field will be ``_deleted-topic_`` if the topic has been - deleted. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used - to configure it. An empty ``pushConfig`` signifies that the subscriber - will pull and ack messages using API methods. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PushConfig` - ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub - waits for the subscriber to acknowledge receipt before resending the - message. In the interval after the message is delivered and before it is - acknowledged, it is considered to be outstanding. During that time - period, the message will not be redelivered (on a best-effort basis). - - For pull subscriptions, this value is used as the initial value for the - ack deadline. To override this value for a given message, call - ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using - non-streaming pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming pull. The - minimum custom deadline you can specify is 10 seconds. The maximum - custom deadline you can specify is 600 seconds (10 minutes). If this - parameter is 0, a default value of 10 seconds is used. - - For push delivery, this value is also used to set the request timeout - for the call to the push endpoint. - - If the subscriber never acknowledges the message, the Pub/Sub system - will eventually redeliver the message. - retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then - messages are not expunged from the subscription's backlog, even if they - are acknowledged, until they fall out of the - ``message_retention_duration`` window. This must be true if you would - like to Seek to a timestamp. - message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's - backlog, from the moment a message is published. If - ``retain_acked_messages`` is true, then this also configures the - retention of acknowledged messages, and thus configures how far back in - time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 - days or less than 10 minutes. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Duration` - labels (dict[str -> str]): See Creating and - managing labels. - enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in - ``PubsubMessage`` will be delivered to the subscribers in the order in - which they are received by the Pub/Sub system. Otherwise, they may be - delivered in any order. EXPERIMENTAL: This feature is part of a closed - alpha release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. 
- expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's - expiration. A subscription is considered active as long as any connected - subscriber is successfully consuming messages from the subscription or - is issuing operations on the subscription. If ``expiration_policy`` is - not set, a *default policy* with ``ttl`` of 31 days will be used. The - minimum allowed value for ``expiration_policy.ttl`` is 1 day. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` - filter_ (str): An expression written in the Pub/Sub `filter - language `__. If - non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field - matches the filter are delivered on this subscription. If empty, then no - messages are filtered out. - dead_letter_policy (Union[dict, ~google.cloud.pubsub_v1.types.DeadLetterPolicy]): A policy that specifies the conditions for dead lettering messages - in this subscription. If dead_letter_policy is not set, dead lettering - is disabled. - - The Cloud Pub/Sub service account associated with this subscriptions's - parent project (i.e., - service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must - have permission to Acknowledge() messages on this subscription. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.DeadLetterPolicy` - retry_policy (Union[dict, ~google.cloud.pubsub_v1.types.RetryPolicy]): A policy that specifies how Pub/Sub retries message delivery for this - subscription. - - If not set, the default retry policy is applied. This generally implies - that messages will be retried as soon as possible for healthy subscribers. - RetryPolicy will be triggered on NACKs or acknowledgement deadline - exceeded events for a given message. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.RetryPolicy` - detached (bool): Indicates whether the subscription is detached from its topic. - Detached subscriptions don't receive messages from their topic and don't - retain any backlog. ``Pull`` and ``StreamingPull`` requests will return - FAILED_PRECONDITION. If the subscription is a push subscription, pushes - to the endpoint will not be made. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
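# [Illustrative sketch — editorial annotation, not part of the deleted file.]
# The ``filter_`` parameter carries a trailing underscore only to avoid
# shadowing Python's built-in ``filter``; it maps back to the proto field
# named ``filter``, as the ``filter=filter_`` request construction just below
# shows. The same PEP 8 convention in isolation (names are hypothetical):
def build_request(filter_=""):
    return {"filter": filter_}  # underscore dropped at the proto boundary

assert build_request(filter_='attributes.origin = "eu"') == {
    "filter": 'attributes.origin = "eu"'
}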
- if "create_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "create_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_subscription, - default_retry=self._method_configs["CreateSubscription"].retry, - default_timeout=self._method_configs["CreateSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.Subscription( - name=name, - topic=topic, - push_config=push_config, - ack_deadline_seconds=ack_deadline_seconds, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - labels=labels, - enable_message_ordering=enable_message_ordering, - expiration_policy=expiration_policy, - filter=filter_, - dead_letter_policy=dead_letter_policy, - retry_policy=retry_policy, - detached=detached, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the configuration details of a subscription. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.get_subscription(subscription) - - Args: - subscription (str): Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
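The try/except dance in the implementation above exists to build the request-routing header without failing when the field is absent. A sketch of what to_grpc_metadata produces, assuming api_core's documented behavior (the value is URL-encoded):

    from google.api_core.gapic_v1 import routing_header

    # Field/value pairs become a single header that tells the Pub/Sub
    # frontend which backend should handle the request.
    header = routing_header.to_grpc_metadata(
        [("name", "projects/my-project/subscriptions/my-subscription")]
    )
    print(header)
    # approximately: ('x-goog-request-params',
    #                 'name=projects%2Fmy-project%2Fsubscriptions%2Fmy-subscription')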
- if "get_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "get_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_subscription, - default_retry=self._method_configs["GetSubscription"].retry, - default_timeout=self._method_configs["GetSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_subscription( - self, - subscription, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> ack_deadline_seconds = 42 - >>> subscription_name = 'projects/my-project/subscriptions/my-subscription' - >>> subscription = { - ... 'name': subscription_name, - ... 'ack_deadline_seconds': ack_deadline_seconds, - ... } - >>> paths_element = 'ack_deadline_seconds' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_subscription(subscription, update_mask) - - Args: - subscription (Union[dict, ~google.cloud.pubsub_v1.types.Subscription]): Required. The updated subscription object. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Subscription` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided subscription to update. - Must be specified and non-empty. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "update_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_subscription, - default_retry=self._method_configs["UpdateSubscription"].retry, - default_timeout=self._method_configs["UpdateSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription.name", subscription.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_subscriptions( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists matching subscriptions. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_subscriptions(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_subscriptions(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list subscriptions. - Format is ``projects/{project-id}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Subscription` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_subscriptions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_subscriptions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_subscriptions, - default_retry=self._method_configs["ListSubscriptions"].retry, - default_timeout=self._method_configs["ListSubscriptions"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListSubscriptionsRequest( - project=project, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_subscriptions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="subscriptions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after deletion - will return ``NOT_FOUND``. After a subscription is deleted, a new one - may be created with the same name, but the new one has no association - with the old subscription or its topic unless the same topic is - specified. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> client.delete_subscription(subscription) - - Args: - subscription (str): Required. The subscription to delete. Format is - ``projects/{project}/subscriptions/{sub}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
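Because list_subscriptions returns a GRPCIterator wired to page_token/next_page_token as shown above, callers can consume results lazily; each new page costs one ListSubscriptions RPC. A usage sketch with placeholder names:

    from google.cloud import pubsub_v1

    client = pubsub_v1.SubscriberClient()
    project = "projects/my-project"

    # Flat iteration: pages are fetched transparently as needed.
    for subscription in client.list_subscriptions(project, page_size=100):
        print(subscription.name)

    # Page-at-a-time iteration, e.g. to checkpoint between RPCs.
    iterator = client.list_subscriptions(project, page_size=100)
    for page in iterator.pages:
        names = [s.name for s in page]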
- if "delete_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_subscription, - default_retry=self._method_configs["DeleteSubscription"].retry, - default_timeout=self._method_configs["DeleteSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_snapshot( - self, - snapshot, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> - >>> response = client.get_snapshot(snapshot) - - Args: - snapshot (str): Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "get_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_snapshot, - default_retry=self._method_configs["GetSnapshot"].retry, - default_timeout=self._method_configs["GetSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot", snapshot)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def modify_ack_deadline( - self, - subscription, - ack_ids, - ack_deadline_seconds, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Modifies the ack deadline for a specific message. This method is - useful to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `ack_ids`: - >>> ack_ids = [] - >>> - >>> # TODO: Initialize `ack_deadline_seconds`: - >>> ack_deadline_seconds = 0 - >>> - >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - Args: - subscription (str): Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): Required. List of acknowledgment IDs. - ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this request - was sent to the Pub/Sub system. For example, if the value is 10, the new - ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call - was made. Specifying zero might immediately make the message available - for delivery to another subscriber client. This typically results in an - increase in the rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The maximum deadline you - can specify is 600 seconds (10 minutes). - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
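The deadline semantics documented above mean modify_ack_deadline doubles as both "I need more time" and an explicit nack (deadline 0). A sketch, assuming ack IDs already in hand from an earlier Pull response (the IDs below are placeholders):

    from google.cloud import pubsub_v1

    client = pubsub_v1.SubscriberClient()
    subscription = "projects/my-project/subscriptions/my-subscription"
    slow_ack_ids = ["ack-id-1"]    # placeholder values
    failed_ack_ids = ["ack-id-2"]

    # Give slow messages two more minutes before redelivery.
    client.modify_ack_deadline(subscription, slow_ack_ids, ack_deadline_seconds=120)

    # Deadline 0 releases messages for immediate redelivery (an effective nack).
    client.modify_ack_deadline(subscription, failed_ack_ids, ack_deadline_seconds=0)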
- if "modify_ack_deadline" not in self._inner_api_calls: - self._inner_api_calls[ - "modify_ack_deadline" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_ack_deadline, - default_retry=self._method_configs["ModifyAckDeadline"].retry, - default_timeout=self._method_configs["ModifyAckDeadline"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ModifyAckDeadlineRequest( - subscription=subscription, - ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["modify_ack_deadline"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def acknowledge( - self, - subscription, - ack_ids, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant - messages from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, but - such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `ack_ids`: - >>> ack_ids = [] - >>> - >>> client.acknowledge(subscription, ack_ids) - - Args: - subscription (str): Required. The subscription whose message is being acknowledged. - Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): Required. The acknowledgment ID for the messages being acknowledged - that was returned by the Pub/Sub system in the ``Pull`` response. Must - not be empty. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "acknowledge" not in self._inner_api_calls: - self._inner_api_calls[ - "acknowledge" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.acknowledge, - default_retry=self._method_configs["Acknowledge"].retry, - default_timeout=self._method_configs["Acknowledge"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["acknowledge"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def pull( - self, - subscription, - max_messages, - return_immediately=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests pending - for the given subscription. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `max_messages`: - >>> max_messages = 0 - >>> - >>> response = client.pull(subscription, max_messages) - - Args: - subscription (str): Required. The subscription from which messages should be pulled. - Format is ``projects/{project}/subscriptions/{sub}``. - max_messages (int): Required. The maximum number of messages to return for this request. Must - be a positive integer. The Pub/Sub system may return fewer than the number - specified. - return_immediately (bool): Optional. If this field set to true, the system will respond - immediately even if it there are no messages available to return in the - ``Pull`` response. Otherwise, the system may wait (for a bounded amount - of time) until at least one message is available, rather than returning - no messages. Warning: setting this field to ``true`` is discouraged - because it adversely impacts the performance of ``Pull`` operations. We - recommend that users do not set this field. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.PullResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
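A pull-then-acknowledge loop tying the two methods together; per the docstrings above, fewer than max_messages may be returned, and acknowledging an expired ack ID may still succeed. Resource names are placeholders:

    from google.cloud import pubsub_v1

    client = pubsub_v1.SubscriberClient()
    subscription = "projects/my-project/subscriptions/my-subscription"

    response = client.pull(subscription, max_messages=10)

    ack_ids = []
    for received in response.received_messages:
        print(received.message.data)      # bytes payload
        ack_ids.append(received.ack_id)

    if ack_ids:                           # Acknowledge requires a non-empty list
        client.acknowledge(subscription, ack_ids)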
- if "pull" not in self._inner_api_calls: - self._inner_api_calls["pull"] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pull, - default_retry=self._method_configs["Pull"].retry, - default_timeout=self._method_configs["Pull"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.PullRequest( - subscription=subscription, - max_messages=max_messages, - return_immediately=return_immediately, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["pull"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def streaming_pull( - self, - requests, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Establishes a stream with the server, which sends messages down to - the client. The client streams acknowledgements and ack deadline - modifications back to the server. The server will close the stream and - return the status on any error. The server may close the stream with - status ``UNAVAILABLE`` to reassign server-side resources, in which case, - the client should re-establish the stream. Flow control can be achieved - by configuring the underlying RPC channel. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `stream_ack_deadline_seconds`: - >>> stream_ack_deadline_seconds = 0 - >>> request = {'subscription': subscription, 'stream_ack_deadline_seconds': stream_ack_deadline_seconds} - >>> - >>> requests = [request] - >>> for element in client.streaming_pull(requests): - ... # process element - ... pass - - Args: - requests (iterator[dict|google.cloud.pubsub_v1.proto.pubsub_pb2.StreamingPullRequest]): The input objects. If a dict is provided, it must be of the - same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.StreamingPullRequest` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - Iterable[~google.cloud.pubsub_v1.types.StreamingPullResponse]. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
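Because streaming_pull is bidirectional, requests is typically a generator rather than the one-element list in the doctest: the first request opens the stream, and later requests carry acks and deadline modifications. A sketch under that assumption; the queue is hypothetical glue written for this example, not part of the API, and it relies on gRPC consuming the request iterator on its own thread:

    import queue
    from google.cloud import pubsub_v1

    client = pubsub_v1.SubscriberClient()
    subscription = "projects/my-project/subscriptions/my-subscription"
    pending_acks = queue.Queue()  # hypothetical: filled by the processing code below

    def request_generator(subscription):
        # Opening request: must name the subscription and a stream-wide deadline.
        yield {"subscription": subscription, "stream_ack_deadline_seconds": 60}
        while True:
            # Subsequent requests ack messages received on the stream.
            yield {"ack_ids": [pending_acks.get()]}

    for response in client.streaming_pull(request_generator(subscription)):
        for received in response.received_messages:
            pending_acks.put(received.ack_id)  # acked on the next outbound request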
- if "streaming_pull" not in self._inner_api_calls: - self._inner_api_calls[ - "streaming_pull" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.streaming_pull, - default_retry=self._method_configs["StreamingPull"].retry, - default_timeout=self._method_configs["StreamingPull"].timeout, - client_info=self._client_info, - ) - - # Wrappers in api-core should not automatically pre-fetch the first - # stream result, as this breaks the stream when re-opening it. - # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self.transport.streaming_pull._prefetch_first_result_ = False - - return self._inner_api_calls["streaming_pull"]( - requests, retry=retry, timeout=timeout, metadata=metadata - ) - - def modify_push_config( - self, - subscription, - push_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified - by an empty ``PushConfig``) or vice versa, or change the endpoint URL - and other attributes of a push subscription. Messages will accumulate - for delivery continuously through the call regardless of changes to the - ``PushConfig``. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `push_config`: - >>> push_config = {} - >>> - >>> client.modify_push_config(subscription, push_config) - - Args: - subscription (str): Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): Required. The push configuration for future deliveries. - - An empty ``pushConfig`` indicates that the Pub/Sub system should stop - pushing messages from the given subscription and allow messages to be - pulled and acknowledged - effectively pausing the subscription if - ``Pull`` or ``StreamingPull`` is not called. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PushConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
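As the docstring above notes, an empty PushConfig flips a subscription to pull, while a non-empty one points deliveries at an endpoint. A sketch with a placeholder URL; the push_endpoint key follows the PushConfig message and is assumed here:

    from google.cloud import pubsub_v1

    client = pubsub_v1.SubscriberClient()
    subscription = "projects/my-project/subscriptions/my-subscription"

    # Pause pushing; messages accumulate until Pull/StreamingPull is called.
    client.modify_push_config(subscription, push_config={})

    # Resume push delivery to an HTTPS endpoint (placeholder URL).
    client.modify_push_config(
        subscription,
        push_config={"push_endpoint": "https://example.com/pubsub-handler"},
    )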
- if "modify_push_config" not in self._inner_api_calls: - self._inner_api_calls[ - "modify_push_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_push_config, - default_retry=self._method_configs["ModifyPushConfig"].retry, - default_timeout=self._method_configs["ModifyPushConfig"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["modify_push_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_snapshots( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_snapshots(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_snapshots(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list snapshots. Format - is ``projects/{project-id}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Snapshot` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_snapshots" not in self._inner_api_calls: - self._inner_api_calls[ - "list_snapshots" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_snapshots, - default_retry=self._method_configs["ListSnapshots"].retry, - default_timeout=self._method_configs["ListSnapshots"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListSnapshotsRequest(project=project, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_snapshots"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="snapshots", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_snapshot( - self, - name, - subscription, - labels=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a snapshot from the requested subscription. Snapshots are - used in Seek operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the acknowledgment state - of messages in an existing subscription to the state captured by a - snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. If the - backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is - returned. See also the ``Snapshot.expire_time`` field. If the name is - not provided in the request, the server will assign a random name for - this snapshot on the same project as the subscription, conforming to the - `resource name - format `__. - The generated name is populated in the returned Snapshot object. Note - that for REST API requests, you must specify a name in the request. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.create_snapshot(name, subscription) - - Args: - name (str): Required. User-provided name for this snapshot. If the name is not - provided in the request, the server will assign a random name for this - snapshot on the same project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name rules. Format - is ``projects/{project}/snapshots/{snap}``. - subscription (str): Required. The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: (a) The - existing backlog on the subscription. More precisely, this is defined as - the messages in the subscription's backlog that are unacknowledged upon - the successful completion of the ``CreateSnapshot`` request; as well as: - (b) Any messages published to the subscription's topic following the - successful completion of the CreateSnapshot request. Format is - ``projects/{project}/subscriptions/{sub}``. - labels (dict[str -> str]): See Creating and - managing labels. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "create_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_snapshot, - default_retry=self._method_configs["CreateSnapshot"].retry, - default_timeout=self._method_configs["CreateSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription, labels=labels, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_snapshot( - self, - snapshot, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> seconds = 123456 - >>> expire_time = {'seconds': seconds} - >>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' - >>> snapshot = { - ... 'name': snapshot_name, - ... 'expire_time': expire_time, - ... } - >>> paths_element = 'expire_time' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_snapshot(snapshot, update_mask) - - Args: - snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): Required. The updated snapshot object. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Snapshot` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided snapshot to update. - Must be specified and non-empty. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "update_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_snapshot, - default_retry=self._method_configs["UpdateSnapshot"].retry, - default_timeout=self._method_configs["UpdateSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot.name", snapshot.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_snapshot( - self, - snapshot, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.
-
- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> - >>> client.delete_snapshot(snapshot) - - Args: - snapshot (str): Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_snapshot, - default_retry=self._method_configs["DeleteSnapshot"].retry, - default_timeout=self._method_configs["DeleteSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot", snapshot)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def seek( - self, - subscription, - time=None, - snapshot=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.seek(subscription) - - Args: - subscription (str): Required. The subscription to affect. - time (Union[dict, ~google.cloud.pubsub_v1.types.Timestamp]): The time to seek to. Messages retained in the subscription that were - published before this time are marked as acknowledged, and messages - retained in the subscription that were published after this time are - marked as unacknowledged. 
Note that this operation affects only those - messages retained in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). For - example, if ``time`` corresponds to a point before the message retention - window (or to a point before the system's notion of the subscription - creation time), only retained messages will be marked as unacknowledged, - and already-expunged messages will not be restored. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Timestamp` - snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as - that of the provided subscription. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.SeekResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "seek" not in self._inner_api_calls: - self._inner_api_calls["seek"] = google.api_core.gapic_v1.method.wrap_method( - self.transport.seek, - default_retry=self._method_configs["Seek"].retry, - default_timeout=self._method_configs["Seek"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - time=time, snapshot=snapshot, - ) - - request = pubsub_pb2.SeekRequest( - subscription=subscription, time=time, snapshot=snapshot, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["seek"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. 
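Since check_oneof above rejects passing both, seek takes either a timestamp or a snapshot. A sketch of a timestamp seek; as the time documentation above explains, it only affects messages still retained, so retain_acked_messages and message_retention_duration must be configured for replay to work:

    import time
    from google.cloud import pubsub_v1
    from google.protobuf import timestamp_pb2

    client = pubsub_v1.SubscriberClient()
    subscription = "projects/my-project/subscriptions/my-subscription"

    one_hour_ago = timestamp_pb2.Timestamp()
    one_hour_ago.FromSeconds(int(time.time()) - 3600)

    # Replay the retained last hour of messages.
    client.seek(subscription, time=one_hour_ago)

    # Or restore the acknowledgment state captured by a snapshot (not both).
    # client.seek(subscription, snapshot="projects/my-project/snapshots/my-snapshot")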
- policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
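A sketch of the "fail open" permission check described above; the permission strings are standard Pub/Sub IAM permission names, assumed here rather than taken from this file:

    from google.cloud import pubsub_v1

    client = pubsub_v1.SubscriberClient()
    subscription = "projects/my-project/subscriptions/my-subscription"

    response = client.test_iam_permissions(
        resource=subscription,
        permissions=[
            "pubsub.subscriptions.consume",  # wildcards like "pubsub.*" are rejected
            "pubsub.subscriptions.get",
        ],
    )
    # Only the subset the caller actually holds comes back.
    print(list(response.permissions))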
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/google/cloud/pubsub_v1/gapic/subscriber_client_config.py deleted file mode 100644 index fc3254975..000000000 --- a/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ /dev/null @@ -1,144 +0,0 @@ -config = { - "interfaces": { - "google.pubsub.v1.Subscriber": { - "retry_codes": { - "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], - "non_idempotent": ["UNAVAILABLE"], - "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "streaming_pull": [ - "ABORTED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - ], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 25000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 25000, - "total_timeout_millis": 600000, - }, - "streaming_messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 600000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 600000, - "total_timeout_millis": 600000, - }, - }, - "methods": { - "CreateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent2", - "retry_params_name": "default", - }, - "ModifyAckDeadline": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Acknowledge": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "messaging", - }, - "Pull": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "messaging", - }, - "StreamingPull": { - 
"timeout_millis": 900000, - "retry_codes_name": "streaming_pull", - "retry_params_name": "streaming_messaging", - }, - "ModifyPushConfig": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListSnapshots": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Seek": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/pubsub_v1/gapic/transports/__init__.py b/google/cloud/pubsub_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py deleted file mode 100644 index bdba63555..000000000 --- a/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ /dev/null @@ -1,296 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc - - -class PublisherGrpcTransport(object): - """gRPC transport class providing stubs for - google.pubsub.v1 Publisher API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - def __init__( - self, channel=None, credentials=None, address="pubsub.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), - "publisher_stub": pubsub_pb2_grpc.PublisherStub(channel), - } - - @classmethod - def create_channel( - cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.create_topic`. - - Creates the given topic with the given name. See the resource name - rules. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].CreateTopic - - @property - def update_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.update_topic`. - - Updates an existing topic. Note that certain properties of a - topic are not modifiable. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].UpdateTopic - - @property - def publish(self): - """Return the gRPC stub for :meth:`PublisherClient.publish`. - - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the - topic does not exist. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].Publish - - @property - def get_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.get_topic`. - - Gets the configuration of a topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].GetTopic - - @property - def list_topics(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topics`. - - Lists matching topics. 
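The constructor above disables gRPC's default message-size limits. A minimal sketch of building an equivalent channel directly, reusing the address, scopes, and options from the deleted transport:

from google.api_core import grpc_helpers

channel = grpc_helpers.create_channel(
    "pubsub.googleapis.com:443",
    scopes=(
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/pubsub",
    ),
    # -1 lifts the default caps on gRPC message sizes.
    options=[
        ("grpc.max_send_message_length", -1),
        ("grpc.max_receive_message_length", -1),
    ],
)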
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].ListTopics - - @property - def list_topic_subscriptions(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topic_subscriptions`. - - Lists the names of the attached subscriptions on this topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].ListTopicSubscriptions - - @property - def list_topic_snapshots(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topic_snapshots`. - - Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].ListTopicSnapshots - - @property - def delete_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.delete_topic`. - - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the - topic does not exist. After a topic is deleted, a new topic may be - created with the same name; this is an entirely new topic with none of - the old configuration or subscriptions. Existing subscriptions to this - topic are not deleted, but their ``topic`` field is set to - ``_deleted-topic_``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].DeleteTopic - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`PublisherClient.set_iam_policy`. - - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].SetIamPolicy - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`PublisherClient.get_iam_policy`. - - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].GetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`PublisherClient.test_iam_permissions`. - - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["iam_policy_stub"].TestIamPermissions - - @property - def detach_subscription(self): - """Return the gRPC stub for :meth:`PublisherClient.detach_subscription`. - - Detaches a subscription from this topic. All messages retained in - the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].DetachSubscription diff --git a/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py deleted file mode 100644 index cd7a19bbe..000000000 --- a/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ /dev/null @@ -1,454 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc - - -class SubscriberGrpcTransport(object): - """gRPC transport class providing stubs for - google.pubsub.v1 Subscriber API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - def __init__( - self, channel=None, credentials=None, address="pubsub.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), - "subscriber_stub": pubsub_pb2_grpc.SubscriberStub(channel), - } - - @classmethod - def create_channel( - cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.create_subscription`. - - Creates a subscription to a given topic. See the resource name - rules. If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a - random name for this subscription on the same project as the topic, - conforming to the `resource name - format `__. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].CreateSubscription - - @property - def get_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_subscription`. - - Gets the configuration details of a subscription. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].GetSubscription - - @property - def update_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.update_subscription`. - - Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].UpdateSubscription - - @property - def list_subscriptions(self): - """Return the gRPC stub for :meth:`SubscriberClient.list_subscriptions`. - - Lists matching subscriptions. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ListSubscriptions - - @property - def delete_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.delete_subscription`. - - Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after deletion - will return ``NOT_FOUND``. 
After a subscription is deleted, a new one - may be created with the same name, but the new one has no association - with the old subscription or its topic unless the same topic is - specified. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].DeleteSubscription - - @property - def get_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_snapshot`. - - Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].GetSnapshot - - @property - def modify_ack_deadline(self): - """Return the gRPC stub for :meth:`SubscriberClient.modify_ack_deadline`. - - Modifies the ack deadline for a specific message. This method is - useful to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ModifyAckDeadline - - @property - def acknowledge(self): - """Return the gRPC stub for :meth:`SubscriberClient.acknowledge`. - - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant - messages from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, but - such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Acknowledge - - @property - def pull(self): - """Return the gRPC stub for :meth:`SubscriberClient.pull`. - - Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests pending - for the given subscription. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Pull - - @property - def streaming_pull(self): - """Return the gRPC stub for :meth:`SubscriberClient.streaming_pull`. - - Establishes a stream with the server, which sends messages down to - the client. The client streams acknowledgements and ack deadline - modifications back to the server. The server will close the stream and - return the status on any error. The server may close the stream with - status ``UNAVAILABLE`` to reassign server-side resources, in which case, - the client should re-establish the stream. Flow control can be achieved - by configuring the underlying RPC channel. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
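In the hand-written layer this bidirectional stream is driven through `SubscriberClient.subscribe`, which returns a streaming-pull future; a minimal consumer sketch, with a hypothetical subscription path:

from concurrent import futures

from google.cloud import pubsub_v1

def callback(message: "pubsub_v1.subscriber.message.Message") -> None:
    print(message.data)
    message.ack()

subscriber = pubsub_v1.SubscriberClient()
streaming_pull_future = subscriber.subscribe(
    "projects/my-project/subscriptions/my-sub", callback
)
with subscriber:
    try:
        streaming_pull_future.result(timeout=30)
    except futures.TimeoutError:
        streaming_pull_future.cancel()  # stop the background stream
        streaming_pull_future.result()  # block until shutdown completes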
- """ - return self._stubs["subscriber_stub"].StreamingPull - - @property - def modify_push_config(self): - """Return the gRPC stub for :meth:`SubscriberClient.modify_push_config`. - - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified - by an empty ``PushConfig``) or vice versa, or change the endpoint URL - and other attributes of a push subscription. Messages will accumulate - for delivery continuously through the call regardless of changes to the - ``PushConfig``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ModifyPushConfig - - @property - def list_snapshots(self): - """Return the gRPC stub for :meth:`SubscriberClient.list_snapshots`. - - Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ListSnapshots - - @property - def create_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.create_snapshot`. - - Creates a snapshot from the requested subscription. Snapshots are - used in Seek operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the acknowledgment state - of messages in an existing subscription to the state captured by a - snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. If the - backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is - returned. See also the ``Snapshot.expire_time`` field. If the name is - not provided in the request, the server will assign a random name for - this snapshot on the same project as the subscription, conforming to the - `resource name - format `__. - The generated name is populated in the returned Snapshot object. Note - that for REST API requests, you must specify a name in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].CreateSnapshot - - @property - def update_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.update_snapshot`. - - Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].UpdateSnapshot - - @property - def delete_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.delete_snapshot`. - - Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. 
That is, you can set the
- acknowledgment state of messages in an existing subscription to the state
- captured by a snapshot.
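A minimal snapshot round trip with the current generated client, under the semantics described here; both resource paths are hypothetical:

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription_path = "projects/my-project/subscriptions/my-sub"
snapshot_path = "projects/my-project/snapshots/my-snapshot"

with subscriber:
    snapshot = subscriber.create_snapshot(
        request={"name": snapshot_path, "subscription": subscription_path}
    )
    # Rewind the subscription's acknowledgment state to the snapshot ...
    subscriber.seek(
        request={"subscription": subscription_path, "snapshot": snapshot.name}
    )
    # ... then drop the snapshot once it is no longer needed.
    subscriber.delete_snapshot(request={"snapshot": snapshot_path})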

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].DeleteSnapshot - - @property - def seek(self): - """Return the gRPC stub for :meth:`SubscriberClient.seek`. - - Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Seek - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`SubscriberClient.set_iam_policy`. - - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].SetIamPolicy - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_iam_policy`. - - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].GetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`SubscriberClient.test_iam_permissions`. - - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].TestIamPermissions diff --git a/google/cloud/pubsub_v1/open_telemetry/__init__.py b/google/cloud/pubsub_v1/open_telemetry/__init__.py new file mode 100644 index 000000000..e88bb5dbb --- /dev/null +++ b/google/cloud/pubsub_v1/open_telemetry/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/google/cloud/pubsub_v1/open_telemetry/context_propagation.py b/google/cloud/pubsub_v1/open_telemetry/context_propagation.py new file mode 100644 index 000000000..bfa1aa638 --- /dev/null +++ b/google/cloud/pubsub_v1/open_telemetry/context_propagation.py @@ -0,0 +1,55 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional, List + +from opentelemetry.propagators.textmap import Setter, Getter + +from google.pubsub_v1 import PubsubMessage + + +class OpenTelemetryContextSetter(Setter): + """ + Used by Open Telemetry for context propagation. + """ + + def set(self, carrier: PubsubMessage, key: str, value: str) -> None: + """ + Injects trace context into Pub/Sub message attributes with + "googclient_" prefix. + + Args: + carrier(PubsubMessage): The Pub/Sub message which is the carrier of Open Telemetry + data. + key(str): The key for which the Open Telemetry context data needs to be set. + value(str): The Open Telemetry context value to be set. + + Returns: + None + """ + carrier.attributes["googclient_" + key] = value + + +class OpenTelemetryContextGetter(Getter): + """ + Used by Open Telemetry for context propagation. + """ + + def get(self, carrier: PubsubMessage, key: str) -> Optional[List[str]]: + if ("googclient_" + key) not in carrier.attributes: + return None + return [carrier.attributes["googclient_" + key]] + + def keys(self, carrier: PubsubMessage) -> List[str]: + return list(map(str, carrier.attributes.keys())) diff --git a/google/cloud/pubsub_v1/open_telemetry/publish_message_wrapper.py b/google/cloud/pubsub_v1/open_telemetry/publish_message_wrapper.py new file mode 100644 index 000000000..e03a8f800 --- /dev/null +++ b/google/cloud/pubsub_v1/open_telemetry/publish_message_wrapper.py @@ -0,0 +1,142 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
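The `OpenTelemetryContextSetter`/`OpenTelemetryContextGetter` pair added above gives the W3C propagator a view onto `PubsubMessage.attributes`. A minimal round trip, assuming an OpenTelemetry `TracerProvider` has been configured; the tracer name and payload are illustrative:

from opentelemetry import trace
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

from google.pubsub_v1 import PubsubMessage
from google.cloud.pubsub_v1.open_telemetry.context_propagation import (
    OpenTelemetryContextGetter,
    OpenTelemetryContextSetter,
)

propagator = TraceContextTextMapPropagator()
message = PubsubMessage(data=b"payload")

tracer = trace.get_tracer("example.tracer")
with tracer.start_as_current_span("publish example"):
    # Publisher side: writes e.g. attributes["googclient_traceparent"].
    propagator.inject(carrier=message, setter=OpenTelemetryContextSetter())

# Subscriber side: rebuild the remote context from the same attributes.
remote_context = propagator.extract(
    carrier=message, getter=OpenTelemetryContextGetter()
)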
+ +import sys +from datetime import datetime +from typing import Optional + +from opentelemetry import trace +from opentelemetry.trace.propagation import set_span_in_context +from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator + +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextSetter, +) + + +class PublishMessageWrapper: + _OPEN_TELEMETRY_TRACER_NAME: str = "google.cloud.pubsub_v1" + _OPEN_TELEMETRY_MESSAGING_SYSTEM: str = "gcp_pubsub" + _OPEN_TELEMETRY_PUBLISHER_BATCHING = "publisher batching" + + _PUBLISH_START_EVENT: str = "publish start" + _PUBLISH_FLOW_CONTROL: str = "publisher flow control" + + def __init__(self, message: gapic_types.PubsubMessage): + self._message: gapic_types.PubsubMessage = message + self._create_span: Optional[trace.Span] = None + self._flow_control_span: Optional[trace.Span] = None + self._batching_span: Optional[trace.Span] = None + + @property + def message(self): + return self._message + + @message.setter # type: ignore[no-redef] # resetting message value is intentional here + def message(self, message: gapic_types.PubsubMessage): + self._message = message + + @property + def create_span(self): + return self._create_span + + def __eq__(self, other): # pragma: NO COVER + """Used for pytest asserts to compare two PublishMessageWrapper objects with the same message""" + if isinstance(self, other.__class__): + return self.message == other.message + return False + + def start_create_span(self, topic: str, ordering_key: str) -> None: + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + assert len(topic.split("/")) == 4 + topic_short_name = topic.split("/")[3] + with tracer.start_as_current_span( + name=f"{topic_short_name} create", + attributes={ + "messaging.system": self._OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.destination.name": topic_short_name, + "code.function": "publish", + "messaging.gcp_pubsub.message.ordering_key": ordering_key, + "messaging.operation": "create", + "gcp.project_id": topic.split("/")[1], + "messaging.message.body.size": sys.getsizeof( + self._message.data + ), # sys.getsizeof() used since the attribute expects size of message body in bytes + }, + kind=trace.SpanKind.PRODUCER, + end_on_exit=False, + ) as create_span: + create_span.add_event( + name=self._PUBLISH_START_EVENT, + attributes={ + "timestamp": str(datetime.now()), + }, + ) + self._create_span = create_span + TraceContextTextMapPropagator().inject( + carrier=self._message, + setter=OpenTelemetryContextSetter(), + ) + + def end_create_span(self, exc: Optional[BaseException] = None) -> None: + assert self._create_span is not None + if exc: + self._create_span.record_exception(exception=exc) + self._create_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._create_span.end() + + def start_publisher_flow_control_span(self) -> None: + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + assert self._create_span is not None + with tracer.start_as_current_span( + name=self._PUBLISH_FLOW_CONTROL, + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._create_span), + end_on_exit=False, + ) as flow_control_span: + self._flow_control_span = flow_control_span + + def end_publisher_flow_control_span( + self, exc: Optional[BaseException] = None + ) -> None: + assert self._flow_control_span is not None + if exc: + self._flow_control_span.record_exception(exception=exc) + 
self._flow_control_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._flow_control_span.end() + + def start_publisher_batching_span(self) -> None: + assert self._create_span is not None + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=self._OPEN_TELEMETRY_PUBLISHER_BATCHING, + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._create_span), + end_on_exit=False, + ) as batching_span: + self._batching_span = batching_span + + def end_publisher_batching_span(self, exc: Optional[BaseException] = None) -> None: + assert self._batching_span is not None + if exc: + self._batching_span.record_exception(exception=exc) + self._batching_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._batching_span.end() diff --git a/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py b/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py new file mode 100644 index 000000000..5a6abd21b --- /dev/null +++ b/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py @@ -0,0 +1,288 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional, List +from datetime import datetime + +from opentelemetry import trace, context +from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator +from opentelemetry.trace.propagation import set_span_in_context + +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextGetter, +) +from google.pubsub_v1.types import PubsubMessage + +_OPEN_TELEMETRY_TRACER_NAME: str = "google.cloud.pubsub_v1" +_OPEN_TELEMETRY_MESSAGING_SYSTEM: str = "gcp_pubsub" + + +class SubscribeOpenTelemetry: + def __init__(self, message: PubsubMessage): + self._message: PubsubMessage = message + + # subscribe span will be initialized by the `start_subscribe_span` + # method. + self._subscribe_span: Optional[trace.Span] = None + + # subscriber concurrency control span will be initialized by the + # `start_subscribe_concurrency_control_span` method. + self._concurrency_control_span: Optional[trace.Span] = None + + # scheduler span will be initialized by the + # `start_subscribe_scheduler_span` method. + self._scheduler_span: Optional[trace.Span] = None + + # This will be set by `start_subscribe_span` method and will be used + # for other spans, such as process span. + self._subscription_id: Optional[str] = None + + # This will be set by `start_process_span` method. + self._process_span: Optional[trace.Span] = None + + # This will be set by `start_subscribe_span` method, if a publisher create span + # context was extracted from trace propagation. And will be used by spans like + # proces span to add links to the publisher create span. + self._publisher_create_span_context: Optional[context.Context] = None + + # This will be set by `start_subscribe_span` method and will be used + # for other spans, such as modack span. 
+ self._project_id: Optional[str] = None + + @property + def subscription_id(self) -> Optional[str]: + return self._subscription_id + + @property + def project_id(self) -> Optional[str]: + return self._project_id + + @property + def subscribe_span(self) -> Optional[trace.Span]: + return self._subscribe_span + + def start_subscribe_span( + self, + subscription: str, + exactly_once_enabled: bool, + ack_id: str, + delivery_attempt: int, + ) -> None: + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + parent_span_context = TraceContextTextMapPropagator().extract( + carrier=self._message, + getter=OpenTelemetryContextGetter(), + ) + self._publisher_create_span_context = parent_span_context + split_subscription: List[str] = subscription.split("/") + assert len(split_subscription) == 4 + subscription_short_name = split_subscription[3] + self._project_id = split_subscription[1] + self._subscription_id = subscription_short_name + with tracer.start_as_current_span( + name=f"{subscription_short_name} subscribe", + context=parent_span_context if parent_span_context else None, + kind=trace.SpanKind.CONSUMER, + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.destination.name": subscription_short_name, + "gcp.project_id": subscription.split("/")[1], + "messaging.message.id": self._message.message_id, + "messaging.message.body.size": len(self._message.data), + "messaging.gcp_pubsub.message.ack_id": ack_id, + "messaging.gcp_pubsub.message.ordering_key": self._message.ordering_key, + "messaging.gcp_pubsub.message.exactly_once_delivery": exactly_once_enabled, + "code.function": "_on_response", + "messaging.gcp_pubsub.message.delivery_attempt": delivery_attempt, + }, + end_on_exit=False, + ) as subscribe_span: + self._subscribe_span = subscribe_span + + def add_subscribe_span_event(self, event: str) -> None: + assert self._subscribe_span is not None + self._subscribe_span.add_event( + name=event, + attributes={ + "timestamp": str(datetime.now()), + }, + ) + + def end_subscribe_span(self) -> None: + assert self._subscribe_span is not None + self._subscribe_span.end() + + def set_subscribe_span_result(self, result: str) -> None: + assert self._subscribe_span is not None + self._subscribe_span.set_attribute( + key="messaging.gcp_pubsub.result", + value=result, + ) + + def start_subscribe_concurrency_control_span(self) -> None: + assert self._subscribe_span is not None + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name="subscriber concurrency control", + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._subscribe_span), + end_on_exit=False, + ) as concurrency_control_span: + self._concurrency_control_span = concurrency_control_span + + def end_subscribe_concurrency_control_span(self) -> None: + assert self._concurrency_control_span is not None + self._concurrency_control_span.end() + + def start_subscribe_scheduler_span(self) -> None: + assert self._subscribe_span is not None + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name="subscriber scheduler", + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._subscribe_span), + end_on_exit=False, + ) as scheduler_span: + self._scheduler_span = scheduler_span + + def end_subscribe_scheduler_span(self) -> None: + assert self._scheduler_span is not None + self._scheduler_span.end() + + def start_process_span(self) -> trace.Span: + assert self._subscribe_span is not None + tracer = 
trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + publish_create_span_link: Optional[trace.Link] = None + if self._publisher_create_span_context: + publish_create_span: trace.Span = trace.get_current_span( + self._publisher_create_span_context + ) + span_context: Optional[ + trace.SpanContext + ] = publish_create_span.get_span_context() + publish_create_span_link = ( + trace.Link(span_context) if span_context else None + ) + + with tracer.start_as_current_span( + name=f"{self._subscription_id} process", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + }, + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._subscribe_span), + links=[publish_create_span_link] if publish_create_span_link else None, + end_on_exit=False, + ) as process_span: + self._process_span = process_span + return process_span + + def end_process_span(self) -> None: + assert self._process_span is not None + self._process_span.end() + + def add_process_span_event(self, event: str) -> None: + assert self._process_span is not None + self._process_span.add_event( + name=event, + attributes={ + "timestamp": str(datetime.now()), + }, + ) + + def __enter__(self) -> trace.Span: + return self.start_process_span() + + def __exit__(self, exc_type, exc_val, traceback): + if self._process_span: + self.end_process_span() + + +def start_modack_span( + subscribe_span_links: List[trace.Link], + subscription_id: Optional[str], + message_count: int, + deadline: float, + project_id: Optional[str], + code_function: str, + receipt_modack: bool, +) -> trace.Span: + assert subscription_id is not None + assert project_id is not None + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=f"{subscription_id} modack", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.batch.message_count": message_count, + "messaging.gcp_pubsub.message.ack_deadline": deadline, + "messaging.destination.name": subscription_id, + "gcp.project_id": project_id, + "messaging.operation.name": "modack", + "code.function": code_function, + "messaging.gcp_pubsub.is_receipt_modack": receipt_modack, + }, + links=subscribe_span_links, + kind=trace.SpanKind.CLIENT, + end_on_exit=False, + ) as modack_span: + return modack_span + + +def start_ack_span( + subscription_id: str, + message_count: int, + project_id: str, + links: List[trace.Link], +) -> trace.Span: + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=f"{subscription_id} ack", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.batch.message_count": message_count, + "messaging.operation": "ack", + "gcp.project_id": project_id, + "messaging.destination.name": subscription_id, + "code.function": "ack", + }, + kind=trace.SpanKind.CLIENT, + links=links, + end_on_exit=False, + ) as ack_span: + return ack_span + + +def start_nack_span( + subscription_id: str, + message_count: int, + project_id: str, + links: List[trace.Link], +) -> trace.Span: + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=f"{subscription_id} nack", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.batch.message_count": message_count, + "messaging.operation": "nack", + "gcp.project_id": project_id, + "messaging.destination.name": subscription_id, + "code.function": "modify_ack_deadline", + }, + kind=trace.SpanKind.CLIENT, + links=links, + end_on_exit=False, + ) as nack_span: + 
return nack_span diff --git a/google/cloud/pubsub_v1/proto/__init__.py b/google/cloud/pubsub_v1/proto/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/google/cloud/pubsub_v1/proto/pubsub.proto b/google/cloud/pubsub_v1/proto/pubsub.proto index dc9151446..716c7ba05 100644 --- a/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/google/cloud/pubsub_v1/proto/pubsub.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -24,6 +24,7 @@ import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; +import "google/pubsub/v1/schema.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.PubSub.V1"; @@ -42,9 +43,8 @@ service Publisher { "https://www.googleapis.com/auth/cloud-platform," "https://www.googleapis.com/auth/pubsub"; - // Creates the given topic with the given name. See the - // - // resource name rules. + // Creates the given topic with the given name. See the [resource name rules] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). rpc CreateTopic(Topic) returns (Topic) { option (google.api.http) = { put: "/v1/{name=projects/*/topics/*}" @@ -98,11 +98,10 @@ service Publisher { } // Lists the names of the snapshots on this topic. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. + // [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, + // which allow you to manage message acknowledgments in bulk. That is, you can + // set the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { option (google.api.http) = { @@ -145,6 +144,21 @@ message MessageStoragePolicy { repeated string allowed_persistence_regions = 1; } +// Settings for validating messages published against a schema. +message SchemaSettings { + // Required. The name of the schema that messages published should be + // validated against. Format is `projects/{project}/schemas/{schema}`. The + // value of this field will be `_deleted-schema_` if the schema has been + // deleted. + string schema = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; + + // The encoding of messages validated against `schema`. + Encoding encoding = 2; +} + // A topic resource. message Topic { option (google.api.resource) = { @@ -161,8 +175,8 @@ message Topic { // must not start with `"goog"`. string name = 1 [(google.api.field_behavior) = REQUIRED]; - // See Creating and - // managing labels. + // See [Creating and managing labels] + // (https://cloud.google.com/pubsub/docs/labels). map labels = 2; // Policy constraining the set of Google Cloud Platform regions where messages @@ -175,16 +189,33 @@ message Topic { // // The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*`. string kms_key_name = 5; + + // Settings for validating messages published against a schema. + SchemaSettings schema_settings = 6; + + // Reserved for future use. 
This field is set only in responses from the + // server; it is ignored if it is set in any requests. + bool satisfies_pzs = 7; + + // Indicates the minimum duration to retain a message after it is published to + // the topic. If this field is set, messages published to the topic in the + // last `message_retention_duration` are always available to subscribers. For + // instance, it allows any attached subscription to [seek to a + // timestamp](https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) + // that is up to `message_retention_duration` in the past. If this field is + // not set, message retention is controlled by settings on individual + // subscriptions. Cannot be more than 7 days or less than 10 minutes. + google.protobuf.Duration message_retention_duration = 8; } // A message that is published by publishers and consumed by subscribers. The // message must contain either a non-empty data field or at least one attribute. // Note that client libraries represent this object differently -// depending on the language. See the corresponding -// client -// library documentation for more information. See -// Quotas and limits -// for more information about message limits. +// depending on the language. See the corresponding [client library +// documentation](https://cloud.google.com/pubsub/docs/reference/libraries) for +// more information. See [quotas and limits] +// (https://cloud.google.com/pubsub/quotas) for more information about message +// limits. message PubsubMessage { // The message data field. If this field is empty, the message must contain // at least one attribute. @@ -212,9 +243,6 @@ message PubsubMessage { // delivered to subscribers in the order in which they are received by the // Pub/Sub system. All `PubsubMessage`s published in a given `PublishRequest` // must specify the same `ordering_key` value. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. string ordering_key = 5; } @@ -388,19 +416,17 @@ service Subscriber { "https://www.googleapis.com/auth/cloud-platform," "https://www.googleapis.com/auth/pubsub"; - // Creates a subscription to a given topic. See the - // - // resource name rules. + // Creates a subscription to a given topic. See the [resource name rules] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). // If the subscription already exists, returns `ALREADY_EXISTS`. // If the corresponding topic doesn't exist, returns `NOT_FOUND`. // // If the name is not provided in the request, the server will assign a random // name for this subscription on the same project as the topic, conforming - // to the - // [resource name - // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - // generated name is populated in the returned Subscription object. Note that - // for REST API requests, you must specify a name in the request. + // to the [resource name format] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). The generated + // name is populated in the returned Subscription object. Note that for REST + // API requests, you must specify a name in the request. 
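The new `SchemaSettings` and `message_retention_duration` fields above surface as plain `Topic` fields in the Python client; a hedged sketch of creating such a topic, with both resource paths hypothetical:

from datetime import timedelta

from google.cloud import pubsub_v1
from google.pubsub_v1 import types as gapic_types

publisher = pubsub_v1.PublisherClient()
topic = publisher.create_topic(
    request={
        "name": "projects/my-project/topics/my-topic",
        "schema_settings": {
            "schema": "projects/my-project/schemas/my-schema",
            "encoding": gapic_types.Encoding.JSON,
        },
        # Retain published messages for one day
        # (between 10 minutes and 7 days is allowed).
        "message_retention_duration": timedelta(days=1),
    }
)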
rpc CreateSubscription(Subscription) returns (Subscription) { option (google.api.http) = { put: "/v1/{name=projects/*/subscriptions/*}" @@ -489,6 +515,7 @@ service Subscriber { }; option (google.api.method_signature) = "subscription,return_immediately,max_messages"; + option (google.api.method_signature) = "subscription,max_messages"; } // Establishes a stream with the server, which sends messages down to the @@ -528,12 +555,11 @@ service Subscriber { option (google.api.method_signature) = "snapshot"; } - // Lists the existing snapshots. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. + // Lists the existing snapshots. Snapshots are used in [Seek]( + // https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. rpc ListSnapshots(ListSnapshotsRequest) returns (ListSnapshotsResponse) { option (google.api.http) = { get: "/v1/{project=projects/*}/snapshots" @@ -542,21 +568,19 @@ service Subscriber { } // Creates a snapshot from the requested subscription. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. - //

If the snapshot already exists, returns `ALREADY_EXISTS`. + // [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, + // which allow you to manage message acknowledgments in bulk. That is, you can + // set the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. + // If the snapshot already exists, returns `ALREADY_EXISTS`. // If the requested subscription doesn't exist, returns `NOT_FOUND`. // If the backlog in the subscription is too old -- and the resulting snapshot // would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. // See also the `Snapshot.expire_time` field. If the name is not provided in // the request, the server will assign a random // name for this snapshot on the same project as the subscription, conforming - // to the - // [resource name - // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // to the [resource name format] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). The // generated name is populated in the returned Snapshot object. Note that for // REST API requests, you must specify a name in the request. rpc CreateSnapshot(CreateSnapshotRequest) returns (Snapshot) { @@ -580,12 +604,11 @@ service Subscriber { }; } - // Removes an existing snapshot. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot.
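With the `subscription,max_messages` method signature added for `Pull` above, a synchronous pull no longer needs the `return_immediately` flag; a minimal sketch, with a hypothetical subscription path:

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription_path = "projects/my-project/subscriptions/my-sub"

with subscriber:
    response = subscriber.pull(
        request={"subscription": subscription_path, "max_messages": 10}
    )
    ack_ids = [received.ack_id for received in response.received_messages]
    if ack_ids:
        subscriber.acknowledge(
            request={"subscription": subscription_path, "ack_ids": ack_ids}
        )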

+ // Removes an existing snapshot. Snapshots are used in [Seek] + // (https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. // When the snapshot is deleted, all messages retained in the snapshot // are immediately dropped. After a snapshot is deleted, a new one may be // created with the same name, but the new one has no association with the old @@ -598,13 +621,12 @@ service Subscriber { } // Seeks an existing subscription to a point in time or to a given snapshot, - // whichever is provided in the request. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. Note that both the subscription and the snapshot - // must be on the same topic. + // whichever is provided in the request. Snapshots are used in [Seek] + // (https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. Note that both the subscription and the + // snapshot must be on the same topic. rpc Seek(SeekRequest) returns (SeekResponse) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:seek" @@ -666,10 +688,8 @@ message Subscription { // Indicates whether to retain acknowledged messages. If true, then // messages are not expunged from the subscription's backlog, even if they are // acknowledged, until they fall out of the `message_retention_duration` - // window. This must be true if you would like to - // - // Seek to a timestamp. + // window. This must be true if you would like to [Seek to a timestamp] + // (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time). bool retain_acked_messages = 7; // How long to retain unacknowledged messages in the subscription's backlog, @@ -688,9 +708,6 @@ message Subscription { // will be delivered to the subscribers in the order in which they // are received by the Pub/Sub system. Otherwise, they may be delivered in // any order. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. bool enable_message_ordering = 10; // A policy that specifies the conditions for this subscription's expiration. @@ -733,6 +750,15 @@ message Subscription { // FAILED_PRECONDITION. If the subscription is a push subscription, pushes to // the endpoint will not be made. bool detached = 15; + + // Output only. Indicates the minimum duration for which a message is retained + // after it is published to the subscription's topic. If this field is set, + // messages published to the subscription's topic in the last + // `topic_message_retention_duration` are always available to subscribers. See + // the `message_retention_duration` field in `Topic`. This field is set only + // in responses from the server; it is ignored if it is set in any requests. 
@@ -1136,8 +1162,18 @@ message StreamingPullRequest {
 // Response for the `StreamingPull` method. This response is used to stream
 // messages from the server to the client.
 message StreamingPullResponse {
+  // Subscription properties sent as part of the response.
+  message SubscriptionProperties {
+    bool exactly_once_delivery_enabled = 1;
+    // True iff message ordering is enabled for this subscription.
+    bool message_ordering_enabled = 2;
+  }
+
   // Received Pub/Sub messages. This will not be empty.
   repeated ReceivedMessage received_messages = 1;
+
+  // Properties associated with this subscription.
+  SubscriptionProperties subscription_properties = 4;
 }

 // Request for the `CreateSnapshot` method.
@@ -1186,11 +1222,10 @@ message UpdateSnapshotRequest {
 }

 // A snapshot resource. Snapshots are used in
-// Seek
-// operations, which allow
-// you to manage message acknowledgments in bulk. That is, you can set the
-// acknowledgment state of messages in an existing subscription to the state
-// captured by a snapshot.
+// [Seek](https://cloud.google.com/pubsub/docs/replay-overview)
+// operations, which allow you to manage message acknowledgments in bulk. That
+// is, you can set the acknowledgment state of messages in an existing
+// subscription to the state captured by a snapshot.
 message Snapshot {
   option (google.api.resource) = {
     type: "pubsub.googleapis.com/Snapshot"
@@ -1217,8 +1252,8 @@
   // snapshot that would expire in less than 1 hour after creation.
   google.protobuf.Timestamp expire_time = 3;

-  // See Creating and
-  // managing labels.
+  // See [Creating and managing labels]
+  // (https://cloud.google.com/pubsub/docs/labels).
   map<string, string> labels = 4;
 }

diff --git a/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/google/cloud/pubsub_v1/proto/pubsub_pb2.py
deleted file mode 100644
index 44dc06898..000000000
--- a/google/cloud/pubsub_v1/proto/pubsub_pb2.py
+++ /dev/null
@@ -1,5246 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: google/cloud/pubsub_v1/proto/pubsub.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/pubsub_v1/proto/pubsub.proto", - package="google.pubsub.v1", - syntax="proto3", - serialized_options=b"\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1", - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 
\x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"]\n\x19\x44\x65tachSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x1c\n\x1a\x44\x65tachSubscriptionResponse"\xc0\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x12\x10\n\x08\x64\x65tached\x18\x0f \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 
\x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xa9\x02\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t\x12 \n\x18max_outstanding_messages\x18\x07 \x01(\x03\x12\x1d\n\x15max_outstanding_bytes\x18\x08 \x01(\x03"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 
\x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 \n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xa3\x0b\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\xad\x01\n\x12\x44\x65tachSubscription\x12+.google.pubsub.v1.DetachSubscriptionRequest\x1a,.google.pubsub.v1.DetachSubscriptionResponse"<\x82\xd3\xe4\x93\x02\x36"4/v1/{subscription=projects/*/subscriptions/*}:detach\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\
x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - 
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_MESSAGESTORAGEPOLICY = _descriptor.Descriptor( - name="MessageStoragePolicy", - full_name="google.pubsub.v1.MessageStoragePolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="allowed_persistence_regions", - full_name="google.pubsub.v1.MessageStoragePolicy.allowed_persistence_regions", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=306, - serialized_end=365, -) - - -_TOPIC_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Topic.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Topic.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Topic.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_TOPIC = _descriptor.Descriptor( - name="Topic", - full_name="google.pubsub.v1.Topic", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Topic.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Topic.labels", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_storage_policy", - 
full_name="google.pubsub.v1.Topic.message_storage_policy", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="kms_key_name", - full_name="google.pubsub.v1.Topic.kms_key_name", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_TOPIC_LABELSENTRY,], - enum_types=[], - serialized_options=b"\352AQ\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}\022\017_deleted-topic_", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=368, - serialized_end=674, -) - - -_PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( - name="AttributesEntry", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=918, -) - -_PUBSUBMESSAGE = _descriptor.Descriptor( - name="PubsubMessage", - full_name="google.pubsub.v1.PubsubMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="data", - full_name="google.pubsub.v1.PubsubMessage.data", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attributes", - full_name="google.pubsub.v1.PubsubMessage.attributes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_id", - full_name="google.pubsub.v1.PubsubMessage.message_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="publish_time", - full_name="google.pubsub.v1.PubsubMessage.publish_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ordering_key", - full_name="google.pubsub.v1.PubsubMessage.ordering_key", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=677, - serialized_end=918, -) - - -_GETTOPICREQUEST = _descriptor.Descriptor( - name="GetTopicRequest", - full_name="google.pubsub.v1.GetTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.GetTopicRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=920, - serialized_end=989, -) - - -_UPDATETOPICREQUEST = _descriptor.Descriptor( - name="UpdateTopicRequest", - full_name="google.pubsub.v1.UpdateTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.UpdateTopicRequest.topic", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateTopicRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=991, - serialized_end=1110, -) - - -_PUBLISHREQUEST = _descriptor.Descriptor( - name="PublishRequest", - full_name="google.pubsub.v1.PublishRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.PublishRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="messages", - full_name="google.pubsub.v1.PublishRequest.messages", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1112, - serialized_end=1236, -) - - -_PUBLISHRESPONSE = _descriptor.Descriptor( - name="PublishResponse", - full_name="google.pubsub.v1.PublishResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="message_ids", - full_name="google.pubsub.v1.PublishResponse.message_ids", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1238, - serialized_end=1276, -) - - -_LISTTOPICSREQUEST = _descriptor.Descriptor( - name="ListTopicsRequest", - full_name="google.pubsub.v1.ListTopicsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListTopicsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1279, - serialized_end=1407, -) - - -_LISTTOPICSRESPONSE = _descriptor.Descriptor( - name="ListTopicsResponse", - full_name="google.pubsub.v1.ListTopicsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topics", - full_name="google.pubsub.v1.ListTopicsResponse.topics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1409, - serialized_end=1495, -) - - -_LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name="ListTopicSubscriptionsRequest", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1497, - serialized_end=1619, -) - - -_LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name="ListTopicSubscriptionsResponse", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscriptions", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1621, - serialized_end=1742, -) - - -_LISTTOPICSNAPSHOTSREQUEST = _descriptor.Descriptor( - name="ListTopicSnapshotsRequest", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1744, - serialized_end=1862, -) - - -_LISTTOPICSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name="ListTopicSnapshotsResponse", - full_name="google.pubsub.v1.ListTopicSnapshotsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshots", - full_name="google.pubsub.v1.ListTopicSnapshotsResponse.snapshots", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicSnapshotsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1864, - serialized_end=1936, -) - - -_DELETETOPICREQUEST = _descriptor.Descriptor( - name="DeleteTopicRequest", - full_name="google.pubsub.v1.DeleteTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.DeleteTopicRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1938, - serialized_end=2010, -) - - -_DETACHSUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="DetachSubscriptionRequest", - full_name="google.pubsub.v1.DetachSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.DetachSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2012, - 
serialized_end=2105, -) - - -_DETACHSUBSCRIPTIONRESPONSE = _descriptor.Descriptor( - name="DetachSubscriptionResponse", - full_name="google.pubsub.v1.DetachSubscriptionResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2107, - serialized_end=2135, -) - - -_SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Subscription.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Subscription.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Subscription.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_SUBSCRIPTION = _descriptor.Descriptor( - name="Subscription", - full_name="google.pubsub.v1.Subscription", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Subscription.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.Subscription.topic", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="push_config", - full_name="google.pubsub.v1.Subscription.push_config", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_deadline_seconds", - full_name="google.pubsub.v1.Subscription.ack_deadline_seconds", - 
index=3, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retain_acked_messages", - full_name="google.pubsub.v1.Subscription.retain_acked_messages", - index=4, - number=7, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_retention_duration", - full_name="google.pubsub.v1.Subscription.message_retention_duration", - index=5, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Subscription.labels", - index=6, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="enable_message_ordering", - full_name="google.pubsub.v1.Subscription.enable_message_ordering", - index=7, - number=10, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="expiration_policy", - full_name="google.pubsub.v1.Subscription.expiration_policy", - index=8, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.pubsub.v1.Subscription.filter", - index=9, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dead_letter_policy", - full_name="google.pubsub.v1.Subscription.dead_letter_policy", - index=10, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retry_policy", - full_name="google.pubsub.v1.Subscription.retry_policy", - index=11, - number=14, - type=11, - cpp_type=10, - 
label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="detached", - full_name="google.pubsub.v1.Subscription.detached", - index=12, - number=15, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SUBSCRIPTION_LABELSENTRY,], - enum_types=[], - serialized_options=b'\352AU\n"pubsub.googleapis.com/Subscription\022/projects/{project}/subscriptions/{subscription}', - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2138, - serialized_end=2842, -) - - -_RETRYPOLICY = _descriptor.Descriptor( - name="RetryPolicy", - full_name="google.pubsub.v1.RetryPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="minimum_backoff", - full_name="google.pubsub.v1.RetryPolicy.minimum_backoff", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="maximum_backoff", - full_name="google.pubsub.v1.RetryPolicy.maximum_backoff", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2844, - serialized_end=2961, -) - - -_DEADLETTERPOLICY = _descriptor.Descriptor( - name="DeadLetterPolicy", - full_name="google.pubsub.v1.DeadLetterPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dead_letter_topic", - full_name="google.pubsub.v1.DeadLetterPolicy.dead_letter_topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_delivery_attempts", - full_name="google.pubsub.v1.DeadLetterPolicy.max_delivery_attempts", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2963, - serialized_end=3039, -) - - -_EXPIRATIONPOLICY = _descriptor.Descriptor( - name="ExpirationPolicy", - full_name="google.pubsub.v1.ExpirationPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ttl", - full_name="google.pubsub.v1.ExpirationPolicy.ttl", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3041, - serialized_end=3099, -) - - -_PUSHCONFIG_OIDCTOKEN = _descriptor.Descriptor( - name="OidcToken", - full_name="google.pubsub.v1.PushConfig.OidcToken", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.pubsub.v1.PushConfig.OidcToken.service_account_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="audience", - full_name="google.pubsub.v1.PushConfig.OidcToken.audience", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3267, - serialized_end=3327, -) - -_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( - name="AttributesEntry", - full_name="google.pubsub.v1.PushConfig.AttributesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - 
nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=918, -) - -_PUSHCONFIG = _descriptor.Descriptor( - name="PushConfig", - full_name="google.pubsub.v1.PushConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="push_endpoint", - full_name="google.pubsub.v1.PushConfig.push_endpoint", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attributes", - full_name="google.pubsub.v1.PushConfig.attributes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="oidc_token", - full_name="google.pubsub.v1.PushConfig.oidc_token", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_PUSHCONFIG_OIDCTOKEN, _PUSHCONFIG_ATTRIBUTESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="authentication_method", - full_name="google.pubsub.v1.PushConfig.authentication_method", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=3102, - serialized_end=3403, -) - - -_RECEIVEDMESSAGE = _descriptor.Descriptor( - name="ReceivedMessage", - full_name="google.pubsub.v1.ReceivedMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ack_id", - full_name="google.pubsub.v1.ReceivedMessage.ack_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message", - full_name="google.pubsub.v1.ReceivedMessage.message", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="delivery_attempt", - full_name="google.pubsub.v1.ReceivedMessage.delivery_attempt", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3405, - serialized_end=3514, -) - - -_GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="GetSubscriptionRequest", - full_name="google.pubsub.v1.GetSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.GetSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3516, - serialized_end=3606, -) - - -_UPDATESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="UpdateSubscriptionRequest", - full_name="google.pubsub.v1.UpdateSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.UpdateSubscriptionRequest.subscription", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateSubscriptionRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3609, - serialized_end=3749, -) - - -_LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name="ListSubscriptionsRequest", - full_name="google.pubsub.v1.ListSubscriptionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListSubscriptionsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="page_size", - full_name="google.pubsub.v1.ListSubscriptionsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListSubscriptionsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3752, - serialized_end=3887, -) - - -_LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name="ListSubscriptionsResponse", - full_name="google.pubsub.v1.ListSubscriptionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscriptions", - full_name="google.pubsub.v1.ListSubscriptionsResponse.subscriptions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListSubscriptionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3889, - serialized_end=3996, -) - - -_DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="DeleteSubscriptionRequest", - full_name="google.pubsub.v1.DeleteSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.DeleteSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3998, - serialized_end=4091, -) - - -_MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( - name="ModifyPushConfigRequest", - 
full_name="google.pubsub.v1.ModifyPushConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.ModifyPushConfigRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="push_config", - full_name="google.pubsub.v1.ModifyPushConfigRequest.push_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4094, - serialized_end=4241, -) - - -_PULLREQUEST = _descriptor.Descriptor( - name="PullRequest", - full_name="google.pubsub.v1.PullRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.PullRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="return_immediately", - full_name="google.pubsub.v1.PullRequest.return_immediately", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_messages", - full_name="google.pubsub.v1.PullRequest.max_messages", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4244, - serialized_end=4385, -) - - -_PULLRESPONSE = _descriptor.Descriptor( - name="PullResponse", - full_name="google.pubsub.v1.PullResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="received_messages", - full_name="google.pubsub.v1.PullResponse.received_messages", - index=0, - number=1, - type=11, - cpp_type=10, - 
label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4387, - serialized_end=4463, -) - - -_MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( - name="ModifyAckDeadlineRequest", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids", - index=1, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_deadline_seconds", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4466, - serialized_end=4615, -) - - -_ACKNOWLEDGEREQUEST = _descriptor.Descriptor( - name="AcknowledgeRequest", - full_name="google.pubsub.v1.AcknowledgeRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.AcknowledgeRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.AcknowledgeRequest.ack_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - 
nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4617, - serialized_end=4725, -) - - -_STREAMINGPULLREQUEST = _descriptor.Descriptor( - name="StreamingPullRequest", - full_name="google.pubsub.v1.StreamingPullRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.StreamingPullRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.StreamingPullRequest.ack_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="modify_deadline_seconds", - full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds", - index=2, - number=3, - type=5, - cpp_type=1, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="modify_deadline_ack_ids", - full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="stream_ack_deadline_seconds", - full_name="google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds", - index=4, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="client_id", - full_name="google.pubsub.v1.StreamingPullRequest.client_id", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_outstanding_messages", - full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_messages", - index=6, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_outstanding_bytes", - full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_bytes", - index=7, - number=8, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4728, - serialized_end=5025, -) - - -_STREAMINGPULLRESPONSE = _descriptor.Descriptor( - name="StreamingPullResponse", - full_name="google.pubsub.v1.StreamingPullResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="received_messages", - full_name="google.pubsub.v1.StreamingPullResponse.received_messages", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5027, - serialized_end=5112, -) - - -_CREATESNAPSHOTREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_CREATESNAPSHOTREQUEST = _descriptor.Descriptor( - name="CreateSnapshotRequest", - full_name="google.pubsub.v1.CreateSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.CreateSnapshotRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.CreateSnapshotRequest.subscription", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.CreateSnapshotRequest.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5115, - serialized_end=5374, -) - - -_UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( - name="UpdateSnapshotRequest", - full_name="google.pubsub.v1.UpdateSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.UpdateSnapshotRequest.snapshot", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateSnapshotRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5377, - serialized_end=5505, -) - - -_SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Snapshot.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Snapshot.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - 
full_name="google.pubsub.v1.Snapshot.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_SNAPSHOT = _descriptor.Descriptor( - name="Snapshot", - full_name="google.pubsub.v1.Snapshot", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Snapshot.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.Snapshot.topic", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="expire_time", - full_name="google.pubsub.v1.Snapshot.expire_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Snapshot.labels", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SNAPSHOT_LABELSENTRY,], - enum_types=[], - serialized_options=b"\352AI\n\036pubsub.googleapis.com/Snapshot\022'projects/{project}/snapshots/{snapshot}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5508, - serialized_end=5811, -) - - -_GETSNAPSHOTREQUEST = _descriptor.Descriptor( - name="GetSnapshotRequest", - full_name="google.pubsub.v1.GetSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.GetSnapshotRequest.snapshot", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5813, - serialized_end=5891, -) - - -_LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( - name="ListSnapshotsRequest", - full_name="google.pubsub.v1.ListSnapshotsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListSnapshotsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListSnapshotsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListSnapshotsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5894, - serialized_end=6025, -) - - -_LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name="ListSnapshotsResponse", - full_name="google.pubsub.v1.ListSnapshotsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshots", - full_name="google.pubsub.v1.ListSnapshotsResponse.snapshots", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListSnapshotsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6027, - serialized_end=6122, -) - - -_DELETESNAPSHOTREQUEST = _descriptor.Descriptor( - name="DeleteSnapshotRequest", - 
full_name="google.pubsub.v1.DeleteSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.DeleteSnapshotRequest.snapshot", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6124, - serialized_end=6205, -) - - -_SEEKREQUEST = _descriptor.Descriptor( - name="SeekRequest", - full_name="google.pubsub.v1.SeekRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.SeekRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time", - full_name="google.pubsub.v1.SeekRequest.time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.SeekRequest.snapshot", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="target", - full_name="google.pubsub.v1.SeekRequest.target", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=6208, - serialized_end=6398, -) - - -_SEEKRESPONSE = _descriptor.Descriptor( - name="SeekResponse", - full_name="google.pubsub.v1.SeekResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6400, - serialized_end=6414, -) - -_TOPIC_LABELSENTRY.containing_type = _TOPIC -_TOPIC.fields_by_name["labels"].message_type = _TOPIC_LABELSENTRY -_TOPIC.fields_by_name["message_storage_policy"].message_type = _MESSAGESTORAGEPOLICY 
-_PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE -_PUBSUBMESSAGE.fields_by_name[ - "attributes" -].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY -_PUBSUBMESSAGE.fields_by_name[ - "publish_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UPDATETOPICREQUEST.fields_by_name["topic"].message_type = _TOPIC -_UPDATETOPICREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_PUBLISHREQUEST.fields_by_name["messages"].message_type = _PUBSUBMESSAGE -_LISTTOPICSRESPONSE.fields_by_name["topics"].message_type = _TOPIC -_SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION -_SUBSCRIPTION.fields_by_name["push_config"].message_type = _PUSHCONFIG -_SUBSCRIPTION.fields_by_name[ - "message_retention_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_SUBSCRIPTION.fields_by_name["labels"].message_type = _SUBSCRIPTION_LABELSENTRY -_SUBSCRIPTION.fields_by_name["expiration_policy"].message_type = _EXPIRATIONPOLICY -_SUBSCRIPTION.fields_by_name["dead_letter_policy"].message_type = _DEADLETTERPOLICY -_SUBSCRIPTION.fields_by_name["retry_policy"].message_type = _RETRYPOLICY -_RETRYPOLICY.fields_by_name[ - "minimum_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYPOLICY.fields_by_name[ - "maximum_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_EXPIRATIONPOLICY.fields_by_name[ - "ttl" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_PUSHCONFIG_OIDCTOKEN.containing_type = _PUSHCONFIG -_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG -_PUSHCONFIG.fields_by_name["attributes"].message_type = _PUSHCONFIG_ATTRIBUTESENTRY -_PUSHCONFIG.fields_by_name["oidc_token"].message_type = _PUSHCONFIG_OIDCTOKEN -_PUSHCONFIG.oneofs_by_name["authentication_method"].fields.append( - _PUSHCONFIG.fields_by_name["oidc_token"] -) -_PUSHCONFIG.fields_by_name["oidc_token"].containing_oneof = _PUSHCONFIG.oneofs_by_name[ - "authentication_method" -] -_RECEIVEDMESSAGE.fields_by_name["message"].message_type = _PUBSUBMESSAGE -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"].message_type = _SUBSCRIPTION -_UPDATESUBSCRIPTIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"].message_type = _SUBSCRIPTION -_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"].message_type = _PUSHCONFIG -_PULLRESPONSE.fields_by_name["received_messages"].message_type = _RECEIVEDMESSAGE -_STREAMINGPULLRESPONSE.fields_by_name[ - "received_messages" -].message_type = _RECEIVEDMESSAGE -_CREATESNAPSHOTREQUEST_LABELSENTRY.containing_type = _CREATESNAPSHOTREQUEST -_CREATESNAPSHOTREQUEST.fields_by_name[ - "labels" -].message_type = _CREATESNAPSHOTREQUEST_LABELSENTRY -_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"].message_type = _SNAPSHOT -_UPDATESNAPSHOTREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT -_SNAPSHOT.fields_by_name[ - "expire_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SNAPSHOT.fields_by_name["labels"].message_type = _SNAPSHOT_LABELSENTRY -_LISTSNAPSHOTSRESPONSE.fields_by_name["snapshots"].message_type = _SNAPSHOT -_SEEKREQUEST.fields_by_name[ - "time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
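The Pull, ModifyAckDeadline, and Acknowledge request messages removed above together describe a synchronous consume round trip. A hedged sketch of that flow on the regenerated client (resource names are placeholders; the deprecated return_immediately field is deliberately omitted):

    from google.cloud import pubsub_v1

    subscriber = pubsub_v1.SubscriberClient()
    subscription_path = subscriber.subscription_path("my-project", "my-sub")

    # PullRequest: fetch up to max_messages outstanding messages.
    response = subscriber.pull(
        request={"subscription": subscription_path, "max_messages": 10}
    )
    ack_ids = [received.ack_id for received in response.received_messages]

    if ack_ids:
        # ModifyAckDeadlineRequest: buy time while the batch is processed.
        subscriber.modify_ack_deadline(
            request={
                "subscription": subscription_path,
                "ack_ids": ack_ids,
                "ack_deadline_seconds": 60,
            }
        )
        # AcknowledgeRequest: mark the batch as successfully handled.
        subscriber.acknowledge(
            request={"subscription": subscription_path, "ack_ids": ack_ids}
        )
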
-_SEEKREQUEST.oneofs_by_name["target"].fields.append(_SEEKREQUEST.fields_by_name["time"]) -_SEEKREQUEST.fields_by_name["time"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ - "target" -] -_SEEKREQUEST.oneofs_by_name["target"].fields.append( - _SEEKREQUEST.fields_by_name["snapshot"] -) -_SEEKREQUEST.fields_by_name["snapshot"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ - "target" -] -DESCRIPTOR.message_types_by_name["MessageStoragePolicy"] = _MESSAGESTORAGEPOLICY -DESCRIPTOR.message_types_by_name["Topic"] = _TOPIC -DESCRIPTOR.message_types_by_name["PubsubMessage"] = _PUBSUBMESSAGE -DESCRIPTOR.message_types_by_name["GetTopicRequest"] = _GETTOPICREQUEST -DESCRIPTOR.message_types_by_name["UpdateTopicRequest"] = _UPDATETOPICREQUEST -DESCRIPTOR.message_types_by_name["PublishRequest"] = _PUBLISHREQUEST -DESCRIPTOR.message_types_by_name["PublishResponse"] = _PUBLISHRESPONSE -DESCRIPTOR.message_types_by_name["ListTopicsRequest"] = _LISTTOPICSREQUEST -DESCRIPTOR.message_types_by_name["ListTopicsResponse"] = _LISTTOPICSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListTopicSubscriptionsRequest" -] = _LISTTOPICSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTopicSubscriptionsResponse" -] = _LISTTOPICSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListTopicSnapshotsRequest" -] = _LISTTOPICSNAPSHOTSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTopicSnapshotsResponse" -] = _LISTTOPICSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name["DeleteTopicRequest"] = _DELETETOPICREQUEST -DESCRIPTOR.message_types_by_name[ - "DetachSubscriptionRequest" -] = _DETACHSUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name[ - "DetachSubscriptionResponse" -] = _DETACHSUBSCRIPTIONRESPONSE -DESCRIPTOR.message_types_by_name["Subscription"] = _SUBSCRIPTION -DESCRIPTOR.message_types_by_name["RetryPolicy"] = _RETRYPOLICY -DESCRIPTOR.message_types_by_name["DeadLetterPolicy"] = _DEADLETTERPOLICY -DESCRIPTOR.message_types_by_name["ExpirationPolicy"] = _EXPIRATIONPOLICY -DESCRIPTOR.message_types_by_name["PushConfig"] = _PUSHCONFIG -DESCRIPTOR.message_types_by_name["ReceivedMessage"] = _RECEIVEDMESSAGE -DESCRIPTOR.message_types_by_name["GetSubscriptionRequest"] = _GETSUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateSubscriptionRequest" -] = _UPDATESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name["ListSubscriptionsRequest"] = _LISTSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListSubscriptionsResponse" -] = _LISTSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "DeleteSubscriptionRequest" -] = _DELETESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name["ModifyPushConfigRequest"] = _MODIFYPUSHCONFIGREQUEST -DESCRIPTOR.message_types_by_name["PullRequest"] = _PULLREQUEST -DESCRIPTOR.message_types_by_name["PullResponse"] = _PULLRESPONSE -DESCRIPTOR.message_types_by_name["ModifyAckDeadlineRequest"] = _MODIFYACKDEADLINEREQUEST -DESCRIPTOR.message_types_by_name["AcknowledgeRequest"] = _ACKNOWLEDGEREQUEST -DESCRIPTOR.message_types_by_name["StreamingPullRequest"] = _STREAMINGPULLREQUEST -DESCRIPTOR.message_types_by_name["StreamingPullResponse"] = _STREAMINGPULLRESPONSE -DESCRIPTOR.message_types_by_name["CreateSnapshotRequest"] = _CREATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["UpdateSnapshotRequest"] = _UPDATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT -DESCRIPTOR.message_types_by_name["GetSnapshotRequest"] = _GETSNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["ListSnapshotsRequest"] = _LISTSNAPSHOTSREQUEST 
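The oneof wiring removed here is what makes SeekRequest accept either a snapshot or a time as its target, never both. A sketch under the same post-migration assumptions, with placeholder resource names:

    import datetime

    from google.cloud import pubsub_v1
    from google.protobuf import timestamp_pb2

    subscriber = pubsub_v1.SubscriberClient()
    subscription_path = subscriber.subscription_path("my-project", "my-sub")

    # Target variant 1: replay from a snapshot.
    subscriber.seek(
        request={
            "subscription": subscription_path,
            "snapshot": subscriber.snapshot_path("my-project", "my-snapshot"),
        }
    )

    # Target variant 2: rewind to a point in time (the subscription must
    # retain acknowledged messages for this to redeliver them).
    time = timestamp_pb2.Timestamp()
    time.FromDatetime(
        datetime.datetime.now(tz=datetime.timezone.utc)
        - datetime.timedelta(hours=1)
    )
    subscriber.seek(request={"subscription": subscription_path, "time": time})
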
-DESCRIPTOR.message_types_by_name["ListSnapshotsResponse"] = _LISTSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name["DeleteSnapshotRequest"] = _DELETESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["SeekRequest"] = _SEEKREQUEST -DESCRIPTOR.message_types_by_name["SeekResponse"] = _SEEKRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -MessageStoragePolicy = _reflection.GeneratedProtocolMessageType( - "MessageStoragePolicy", - (_message.Message,), - { - "DESCRIPTOR": _MESSAGESTORAGEPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy constraining the storage of messages published to the topic. - - - Attributes: - allowed_persistence_regions: - A list of IDs of GCP regions where messages that are published - to the topic may be persisted in storage. Messages published - by publishers running in non-allowed GCP regions (or running - outside of GCP altogether) will be routed for storage in one - of the allowed regions. An empty list means that no regions - are allowed, and is not a valid configuration. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) - }, -) -_sym_db.RegisterMessage(MessageStoragePolicy) - -Topic = _reflection.GeneratedProtocolMessageType( - "Topic", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _TOPIC_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) - }, - ), - "DESCRIPTOR": _TOPIC, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A topic resource. - - - Attributes: - name: - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must - start with a letter, and contain only letters (``[A-Za-z]``), - numbers (``[0-9]``), dashes (``-``), underscores (``_``), - periods (``.``), tildes (``~``), plus (``+``) or percent signs - (``%``). It must be between 3 and 255 characters in length, - and it must not start with ``"goog"``. - labels: - See Creating and managing labels. - message_storage_policy: - Policy constraining the set of Google Cloud Platform regions - where messages published to the topic may be stored. If not - present, then no constraints are in effect. - kms_key_name: - The resource name of the Cloud KMS CryptoKey to be used to - protect access to messages published on this topic. The - expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) - }, -) -_sym_db.RegisterMessage(Topic) -_sym_db.RegisterMessage(Topic.LabelsEntry) - -PubsubMessage = _reflection.GeneratedProtocolMessageType( - "PubsubMessage", - (_message.Message,), - { - "AttributesEntry": _reflection.GeneratedProtocolMessageType( - "AttributesEntry", - (_message.Message,), - { - "DESCRIPTOR": _PUBSUBMESSAGE_ATTRIBUTESENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) - }, - ), - "DESCRIPTOR": _PUBSUBMESSAGE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A message that is published by publishers and consumed by subscribers. - The message must contain either a non-empty data field or at least one - attribute. Note that client libraries represent this object - differently depending on the language. 
See the corresponding client - library documentation for more information. See Quotas and limits for - more information about message limits. - - - Attributes: - data: - The message data field. If this field is empty, the message - must contain at least one attribute. - attributes: - Attributes for this message. If this field is empty, the - message must contain non-empty data. This can be used to - filter messages on the subscription. - message_id: - ID of this message, assigned by the server when the message is - published. Guaranteed to be unique within the topic. This - value may be read by a subscriber that receives a - ``PubsubMessage`` via a ``Pull`` call or a push delivery. It - must not be populated by the publisher in a ``Publish`` call. - publish_time: - The time at which the message was published, populated by the - server when it receives the ``Publish`` call. It must not be - populated by the publisher in a ``Publish`` call. - ordering_key: - If non-empty, identifies related messages for which publish - order should be respected. If a ``Subscription`` has - ``enable_message_ordering`` set to ``true``, messages - published with the same non-empty ``ordering_key`` value will - be delivered to subscribers in the order in which they are - received by the Pub/Sub system. All ``PubsubMessage``\ s - published in a given ``PublishRequest`` must specify the same - ``ordering_key`` value. EXPERIMENTAL: This feature is part of - a closed alpha release. This API might be changed in backward- - incompatible ways and is not recommended for production use. - It is not subject to any SLA or deprecation policy. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) - }, -) -_sym_db.RegisterMessage(PubsubMessage) -_sym_db.RegisterMessage(PubsubMessage.AttributesEntry) - -GetTopicRequest = _reflection.GeneratedProtocolMessageType( - "GetTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetTopic method. - - - Attributes: - topic: - Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) - }, -) -_sym_db.RegisterMessage(GetTopicRequest) - -UpdateTopicRequest = _reflection.GeneratedProtocolMessageType( - "UpdateTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATETOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateTopic method. - - - Attributes: - topic: - Required. The updated topic object. - update_mask: - Required. Indicates which fields in the provided topic to - update. Must be specified and non-empty. Note that if - ``update_mask`` contains “message_storage_policy” but the - ``message_storage_policy`` is not set in the ``topic`` - provided above, then the updated value is determined by the - policy configured at the project or organization level. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) - }, -) -_sym_db.RegisterMessage(UpdateTopicRequest) - -PublishRequest = _reflection.GeneratedProtocolMessageType( - "PublishRequest", - (_message.Message,), - { - "DESCRIPTOR": _PUBLISHREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the Publish method. - - - Attributes: - topic: - Required. The messages in the request will be published on - this topic. 
Format is ``projects/{project}/topics/{topic}``. - messages: - Required. The messages to publish. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) - }, -) -_sym_db.RegisterMessage(PublishRequest) - -PublishResponse = _reflection.GeneratedProtocolMessageType( - "PublishResponse", - (_message.Message,), - { - "DESCRIPTOR": _PUBLISHRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Publish`` method. - - - Attributes: - message_ids: - The server-assigned ID of each published message, in the same - order as the messages in the request. IDs are guaranteed to be - unique within the topic. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) - }, -) -_sym_db.RegisterMessage(PublishResponse) - -ListTopicsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopics`` method. - - - Attributes: - project: - Required. The name of the project in which to list topics. - Format is ``projects/{project-id}``. - page_size: - Maximum number of topics to return. - page_token: - The value returned by the last ``ListTopicsResponse``; - indicates that this is a continuation of a prior - ``ListTopics`` call, and that the system should return the - next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicsRequest) - -ListTopicsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopics`` method. - - - Attributes: - topics: - The resulting topics. - next_page_token: - If not empty, indicates that there may be more topics that - match the request; this value should be passed in a new - ``ListTopicsRequest``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicsResponse) - -ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicSubscriptionsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopicSubscriptions`` method. - - - Attributes: - topic: - Required. The name of the topic that subscriptions are - attached to. Format is ``projects/{project}/topics/{topic}``. - page_size: - Maximum number of subscription names to return. - page_token: - The value returned by the last - ``ListTopicSubscriptionsResponse``; indicates that this is a - continuation of a prior ``ListTopicSubscriptions`` call, and - that the system should return the next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicSubscriptionsRequest) - -ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicSubscriptionsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopicSubscriptions`` method. 
- - - Attributes: - subscriptions: - The names of subscriptions attached to the topic specified in - the request. - next_page_token: - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListTopicSubscriptionsRequest`` to get more subscriptions. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicSubscriptionsResponse) - -ListTopicSnapshotsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicSnapshotsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSNAPSHOTSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopicSnapshots`` method. - - - Attributes: - topic: - Required. The name of the topic that snapshots are attached - to. Format is ``projects/{project}/topics/{topic}``. - page_size: - Maximum number of snapshot names to return. - page_token: - The value returned by the last ``ListTopicSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListTopicSnapshots`` call, and that the system should return - the next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicSnapshotsRequest) - -ListTopicSnapshotsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicSnapshotsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSNAPSHOTSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopicSnapshots`` method. - - - Attributes: - snapshots: - The names of the snapshots that match the request. - next_page_token: - If not empty, indicates that there may be more snapshots that - match the request; this value should be passed in a new - ``ListTopicSnapshotsRequest`` to get more snapshots. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicSnapshotsResponse) - -DeleteTopicRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``DeleteTopic`` method. - - - Attributes: - topic: - Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) - }, -) -_sym_db.RegisterMessage(DeleteTopicRequest) - -DetachSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "DetachSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _DETACHSUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the DetachSubscription method. - - - Attributes: - subscription: - Required. The subscription to detach. Format is - ``projects/{project}/subscriptions/{subscription}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(DetachSubscriptionRequest) - -DetachSubscriptionResponse = _reflection.GeneratedProtocolMessageType( - "DetachSubscriptionResponse", - (_message.Message,), - { - "DESCRIPTOR": _DETACHSUBSCRIPTIONRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the DetachSubscription method. 
Reserved for future use.""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionResponse) - }, -) -_sym_db.RegisterMessage(DetachSubscriptionResponse) - -Subscription = _reflection.GeneratedProtocolMessageType( - "Subscription", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SUBSCRIPTION_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) - }, - ), - "DESCRIPTOR": _SUBSCRIPTION, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A subscription resource. - - - Attributes: - name: - Required. The name of the subscription. It must have the - format ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain only - letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), plus - (``+``) or percent signs (``%``). It must be between 3 and 255 - characters in length, and it must not start with ``"goog"``. - topic: - Required. The name of the topic from which this subscription - is receiving messages. Format is - ``projects/{project}/topics/{topic}``. The value of this field - will be ``_deleted-topic_`` if the topic has been deleted. - push_config: - If push delivery is used with this subscription, this field is - used to configure it. An empty ``pushConfig`` signifies that - the subscriber will pull and ack messages using API methods. - ack_deadline_seconds: - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt before - resending the message. In the interval after the message is - delivered and before it is acknowledged, it is considered to - be outstanding. During that time period, the message will not - be redelivered (on a best-effort basis). For pull - subscriptions, this value is used as the initial value for the - ack deadline. To override this value for a given message, call - ``ModifyAckDeadline`` with the corresponding ``ack_id`` if - using non-streaming pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming pull. - The minimum custom deadline you can specify is 10 seconds. The - maximum custom deadline you can specify is 600 seconds (10 - minutes). If this parameter is 0, a default value of 10 - seconds is used. For push delivery, this value is also used - to set the request timeout for the call to the push endpoint. - If the subscriber never acknowledges the message, the Pub/Sub - system will eventually redeliver the message. - retain_acked_messages: - Indicates whether to retain acknowledged messages. If true, - then messages are not expunged from the subscription’s - backlog, even if they are acknowledged, until they fall out of - the ``message_retention_duration`` window. This must be true - if you would like to Seek to a timestamp. - message_retention_duration: - How long to retain unacknowledged messages in the - subscription’s backlog, from the moment a message is - published. If ``retain_acked_messages`` is true, then this - also configures the retention of acknowledged messages, and - thus configures how far back in time a ``Seek`` can be done. - Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. - labels: - See Creating and managing labels. 
- enable_message_ordering: - If true, messages published with the same ``ordering_key`` in - ``PubsubMessage`` will be delivered to the subscribers in the - order in which they are received by the Pub/Sub system. - Otherwise, they may be delivered in any order. EXPERIMENTAL: - This feature is part of a closed alpha release. This API might - be changed in backward-incompatible ways and is not - recommended for production use. It is not subject to any SLA - or deprecation policy. - expiration_policy: - A policy that specifies the conditions for this subscription’s - expiration. A subscription is considered active as long as any - connected subscriber is successfully consuming messages from - the subscription or is issuing operations on the subscription. - If ``expiration_policy`` is not set, a *default policy* with - ``ttl`` of 31 days will be used. The minimum allowed value for - ``expiration_policy.ttl`` is 1 day. - filter: - An expression written in the Pub/Sub `filter language - `__. If non-empty, then only ``PubsubMessage``\ s whose ``attributes`` - field matches the filter are delivered on this subscription. - If empty, then no messages are filtered out. - dead_letter_policy: - A policy that specifies the conditions for dead lettering - messages in this subscription. If dead_letter_policy is not - set, dead lettering is disabled. The Cloud Pub/Sub service - account associated with this subscription’s parent project - (i.e., service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to - Acknowledge() messages on this subscription. - retry_policy: - A policy that specifies how Pub/Sub retries message delivery - for this subscription. If not set, the default retry policy - is applied. This generally implies that messages will be - retried as soon as possible for healthy subscribers. - RetryPolicy will be triggered on NACKs or acknowledgement - deadline exceeded events for a given message. - detached: - Indicates whether the subscription is detached from its topic. - Detached subscriptions don’t receive messages from their topic - and don’t retain any backlog. ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription - is a push subscription, pushes to the endpoint will not be - made. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) - }, -) -_sym_db.RegisterMessage(Subscription) -_sym_db.RegisterMessage(Subscription.LabelsEntry) - -RetryPolicy = _reflection.GeneratedProtocolMessageType( - "RetryPolicy", - (_message.Message,), - { - "DESCRIPTOR": _RETRYPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy that specifies how Cloud Pub/Sub retries message delivery. - Retry delay will be exponential based on provided minimum and maximum - backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. - RetryPolicy will be triggered on NACKs or acknowledgement deadline - exceeded events for a given message. Retry Policy is implemented on a - best effort basis. At times, the delay between consecutive deliveries - may not match the configuration. That is, delay can be more or less - than configured backoff. - - - Attributes: - minimum_backoff: - The minimum delay between consecutive deliveries of a given - message. Value should be between 0 and 600 seconds. Defaults - to 10 seconds. - maximum_backoff: - The maximum delay between consecutive deliveries of a given - message. Value should be between 0 and 600 seconds. Defaults - to 600 seconds.
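These backoff bounds map directly onto the subscription's ``retry_policy`` field. A sketch of creating a subscription with an explicit retry policy via the modern client, assuming hypothetical resource names:

```python
from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
# Hypothetical names; substitute real project/topic/subscription ids.
topic_path = subscriber.topic_path("my-project", "my-topic")
subscription_path = subscriber.subscription_path("my-project", "my-sub")

subscription = subscriber.create_subscription(
    request={
        "name": subscription_path,
        "topic": topic_path,
        "ack_deadline_seconds": 30,
        # Exponential redelivery backoff, bounded to [10s, 600s] as above.
        "retry_policy": {
            "minimum_backoff": {"seconds": 10},
            "maximum_backoff": {"seconds": 600},
        },
    }
)
```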
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.RetryPolicy) - }, -) -_sym_db.RegisterMessage(RetryPolicy) - -DeadLetterPolicy = _reflection.GeneratedProtocolMessageType( - "DeadLetterPolicy", - (_message.Message,), - { - "DESCRIPTOR": _DEADLETTERPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Dead lettering is done on a best effort basis. The same message might - be dead lettered multiple times. If validation on any of the fields - fails at subscription creation/updation, the create/update - subscription request will fail. - - - Attributes: - dead_letter_topic: - The name of the topic to which dead letter messages should be - published. Format is ``projects/{project}/topics/{topic}``.The - Cloud Pub/Sub service account associated with the enclosing - subscription’s parent project (i.e., - service-{project_number}@gcp-sa- - pubsub.iam.gserviceaccount.com) must have permission to - Publish() to this topic. The operation will fail if the topic - does not exist. Users should ensure that there is a - subscription attached to this topic since messages published - to a topic with no subscriptions are lost. - max_delivery_attempts: - The maximum number of delivery attempts for any message. The - value must be between 5 and 100. The number of delivery - attempts is defined as 1 + (the sum of number of NACKs and - number of times the acknowledgement deadline has been exceeded - for the message). A NACK is any call to ModifyAckDeadline - with a 0 deadline. Note that client libraries may - automatically extend ack_deadlines. This field will be - honored on a best effort basis. If this parameter is 0, a - default value of 5 is used. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeadLetterPolicy) - }, -) -_sym_db.RegisterMessage(DeadLetterPolicy) - -ExpirationPolicy = _reflection.GeneratedProtocolMessageType( - "ExpirationPolicy", - (_message.Message,), - { - "DESCRIPTOR": _EXPIRATIONPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy that specifies the conditions for resource expiration (i.e., - automatic resource deletion). - - - Attributes: - ttl: - Specifies the “time-to-live” duration for an associated - resource. The resource expires if it is not active for a - period of ``ttl``. The definition of “activity” depends on the - type of the associated resource. The minimum and maximum - allowed values for ``ttl`` depend on the type of the - associated resource, as well. If ``ttl`` is not set, the - associated resource never expires. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ExpirationPolicy) - }, -) -_sym_db.RegisterMessage(ExpirationPolicy) - -PushConfig = _reflection.GeneratedProtocolMessageType( - "PushConfig", - (_message.Message,), - { - "OidcToken": _reflection.GeneratedProtocolMessageType( - "OidcToken", - (_message.Message,), - { - "DESCRIPTOR": _PUSHCONFIG_OIDCTOKEN, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Contains information needed for generating an `OpenID Connect token - `__. - - - Attributes: - service_account_email: - \ `Service account email - `__ to be - used for generating the OIDC token. The caller (for - CreateSubscription, UpdateSubscription, and ModifyPushConfig - RPCs) must have the iam.serviceAccounts.actAs permission for - the service account. - audience: - Audience to be used when generating OIDC token. The audience - claim identifies the recipients that the JWT is intended for. 
- The audience value is a single case-sensitive string. Having - multiple values (array) for the audience field is not - supported. More info about the OIDC JWT token audience here: - https://tools.ietf.org/html/rfc7519#section-4.1.3 Note: if not - specified, the Push endpoint URL will be used. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.OidcToken) - }, - ), - "AttributesEntry": _reflection.GeneratedProtocolMessageType( - "AttributesEntry", - (_message.Message,), - { - "DESCRIPTOR": _PUSHCONFIG_ATTRIBUTESENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) - }, - ), - "DESCRIPTOR": _PUSHCONFIG, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Configuration for a push delivery endpoint. - - - Attributes: - push_endpoint: - A URL locating the endpoint to which messages should be - pushed. For example, a Webhook endpoint might use - ``https://example.com/push``. - attributes: - Endpoint configuration attributes that can be used to control - different aspects of the message delivery. The only currently - supported attribute is ``x-goog-version``, which you can use - to change the format of the pushed message. This attribute - indicates the version of the data expected by the endpoint. - This controls the shape of the pushed message (i.e., its - fields and metadata). If not present during the - ``CreateSubscription`` call, it will default to the version of - the Pub/Sub API used to make such call. If not present in a - ``ModifyPushConfig`` call, its value will not be changed. - ``GetSubscription`` calls will always return a valid version, - even if the subscription was created without this attribute. - The only supported values for the ``x-goog-version`` attribute - are: - ``v1beta1``: uses the push format defined in the - v1beta1 Pub/Sub API. - ``v1`` or ``v1beta2``: uses the push - format defined in the v1 Pub/Sub API. For example: .. - raw:: html
attributes { "x-goog-version": "v1" }
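That ``x-goog-version`` attribute is set through ``ModifyPushConfig``; a sketch with the modern client, assuming a hypothetical subscription and endpoint:

```python
from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path("my-project", "my-sub")

push_config = pubsub_v1.types.PushConfig(
    push_endpoint="https://example.com/push",  # hypothetical endpoint
    # Pin the wire format pushed to the endpoint, per the example above.
    attributes={"x-goog-version": "v1"},
)
subscriber.modify_push_config(
    request={"subscription": subscription_path, "push_config": push_config}
)
```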
- authentication_method: - An authentication method used by push endpoints to verify the - source of push requests. This can be used with push endpoints - that are private by default to allow requests only from the - Cloud Pub/Sub system, for example. This field is optional and - should be set only by users interested in authenticated push. - oidc_token: - If specified, Pub/Sub will generate and attach an OIDC JWT - token as an ``Authorization`` header in the HTTP request for - every pushed message. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) - }, -) -_sym_db.RegisterMessage(PushConfig) -_sym_db.RegisterMessage(PushConfig.OidcToken) -_sym_db.RegisterMessage(PushConfig.AttributesEntry) - -ReceivedMessage = _reflection.GeneratedProtocolMessageType( - "ReceivedMessage", - (_message.Message,), - { - "DESCRIPTOR": _RECEIVEDMESSAGE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A message and its corresponding acknowledgment ID. - - - Attributes: - ack_id: - This ID can be used to acknowledge the received message. - message: - The message. - delivery_attempt: - The approximate number of times that Cloud Pub/Sub has - attempted to deliver the associated message to a subscriber. - More precisely, this is 1 + (number of NACKs) + (number of - ack_deadline exceeds) for this message. A NACK is any call to - ModifyAckDeadline with a 0 deadline. An ack_deadline exceeds - event is whenever a message is not acknowledged within - ack_deadline. Note that ack_deadline is initially - Subscription.ackDeadlineSeconds, but may get extended - automatically by the client library. Upon the first delivery - of a given message, ``delivery_attempt`` will have a value of - 1. The value is calculated at best effort and is approximate. - If a DeadLetterPolicy is not set on the subscription, this - will be 0. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) - }, -) -_sym_db.RegisterMessage(ReceivedMessage) - -GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "GetSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETSUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetSubscription method. - - - Attributes: - subscription: - Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(GetSubscriptionRequest) - -UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATESUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateSubscription method. - - - Attributes: - subscription: - Required. The updated subscription object. - update_mask: - Required. Indicates which fields in the provided subscription - to update. Must be specified and non-empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(UpdateSubscriptionRequest) - -ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( - "ListSubscriptionsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTSUBSCRIPTIONSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListSubscriptions`` method. - - - Attributes: - project: - Required. 
The name of the project in which to list - subscriptions. Format is ``projects/{project-id}``. - page_size: - Maximum number of subscriptions to return. - page_token: - The value returned by the last ``ListSubscriptionsResponse``; - indicates that this is a continuation of a prior - ``ListSubscriptions`` call, and that the system should return - the next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) - }, -) -_sym_db.RegisterMessage(ListSubscriptionsRequest) - -ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( - "ListSubscriptionsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTSUBSCRIPTIONSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListSubscriptions`` method. - - - Attributes: - subscriptions: - The subscriptions that match the request. - next_page_token: - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListSubscriptionsRequest`` to get more subscriptions. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) - }, -) -_sym_db.RegisterMessage(ListSubscriptionsResponse) - -DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETESUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the DeleteSubscription method. - - - Attributes: - subscription: - Required. The subscription to delete. Format is - ``projects/{project}/subscriptions/{sub}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(DeleteSubscriptionRequest) - -ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType( - "ModifyPushConfigRequest", - (_message.Message,), - { - "DESCRIPTOR": _MODIFYPUSHCONFIGREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ModifyPushConfig method. - - - Attributes: - subscription: - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - push_config: - Required. The push configuration for future deliveries. An - empty ``pushConfig`` indicates that the Pub/Sub system should - stop pushing messages from the given subscription and allow - messages to be pulled and acknowledged - effectively pausing - the subscription if ``Pull`` or ``StreamingPull`` is not - called. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) - }, -) -_sym_db.RegisterMessage(ModifyPushConfigRequest) - -PullRequest = _reflection.GeneratedProtocolMessageType( - "PullRequest", - (_message.Message,), - { - "DESCRIPTOR": _PULLREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``Pull`` method. - - - Attributes: - subscription: - Required. The subscription from which messages should be - pulled. Format is ``projects/{project}/subscriptions/{sub}``. - return_immediately: - Optional. If this field is set to true, the system will respond - immediately even if there are no messages available to - return in the ``Pull`` response. Otherwise, the system may - wait (for a bounded amount of time) until at least one message - is available, rather than returning no messages.
Warning: - setting this field to ``true`` is discouraged because it - adversely impacts the performance of ``Pull`` operations. We - recommend that users do not set this field. - max_messages: - Required. The maximum number of messages to return for this - request. Must be a positive integer. The Pub/Sub system may - return fewer than the number specified. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) - }, -) -_sym_db.RegisterMessage(PullRequest) - -PullResponse = _reflection.GeneratedProtocolMessageType( - "PullResponse", - (_message.Message,), - { - "DESCRIPTOR": _PULLRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Pull`` method. - - - Attributes: - received_messages: - Received Pub/Sub messages. The list will be empty if there are - no more messages available in the backlog. For JSON, the - response can be entirely empty. The Pub/Sub system may return - fewer than the ``maxMessages`` requested even if there are - more messages available in the backlog. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) - }, -) -_sym_db.RegisterMessage(PullResponse) - -ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType( - "ModifyAckDeadlineRequest", - (_message.Message,), - { - "DESCRIPTOR": _MODIFYACKDEADLINEREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ModifyAckDeadline method. - - - Attributes: - subscription: - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - Required. List of acknowledgment IDs. - ack_deadline_seconds: - Required. The new ack deadline with respect to the time this - request was sent to the Pub/Sub system. For example, if the - value is 10, the new ack deadline will expire 10 seconds after - the ``ModifyAckDeadline`` call was made. Specifying zero might - immediately make the message available for delivery to another - subscriber client. This typically results in an increase in - the rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The maximum - deadline you can specify is 600 seconds (10 minutes). - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) - }, -) -_sym_db.RegisterMessage(ModifyAckDeadlineRequest) - -AcknowledgeRequest = _reflection.GeneratedProtocolMessageType( - "AcknowledgeRequest", - (_message.Message,), - { - "DESCRIPTOR": _ACKNOWLEDGEREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the Acknowledge method. - - - Attributes: - subscription: - Required. The subscription whose message is being - acknowledged. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - Required. The acknowledgment ID for the messages being - acknowledged that was returned by the Pub/Sub system in the - ``Pull`` response. Must not be empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) - }, -) -_sym_db.RegisterMessage(AcknowledgeRequest) - -StreamingPullRequest = _reflection.GeneratedProtocolMessageType( - "StreamingPullRequest", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGPULLREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``StreamingPull`` streaming RPC method. 
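The ``Pull``/``Acknowledge`` round trip these messages describe looks like the following with the modern client (hypothetical names; unacked messages are redelivered once the ack deadline lapses):

```python
from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path("my-project", "my-sub")

# Pull a bounded batch; the service may legally return fewer messages.
response = subscriber.pull(
    request={"subscription": subscription_path, "max_messages": 10}
)

ack_ids = [received.ack_id for received in response.received_messages]
if ack_ids:
    # Ack within the deadline, or the messages will be redelivered.
    subscriber.acknowledge(
        request={"subscription": subscription_path, "ack_ids": ack_ids}
    )
```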
This request - is used to establish the initial stream as well as to stream - acknowledgements and ack deadline modifications from the client to the - server. - - - Attributes: - subscription: - Required. The subscription for which to initialize the new - stream. This must be provided in the first request on the - stream, and must not be set in subsequent requests from client - to server. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - List of acknowledgement IDs for acknowledging previously - received messages (received on this stream or a different - stream). If an ack ID has expired, the corresponding message - may be redelivered later. Acknowledging a message more than - once will not result in an error. If the acknowledgement ID is - malformed, the stream will be aborted with status - ``INVALID_ARGUMENT``. - modify_deadline_seconds: - The list of new ack deadlines for the IDs listed in - ``modify_deadline_ack_ids``. The size of this list must be the - same as the size of ``modify_deadline_ack_ids``. If it differs - the stream will be aborted with ``INVALID_ARGUMENT``. Each - element in this list is applied to the element in the same - position in ``modify_deadline_ack_ids``. The new ack deadline - is with respect to the time this request was sent to the - Pub/Sub system. Must be >= 0. For example, if the value is 10, - the new ack deadline will expire 10 seconds after this request - is received. If the value is 0, the message is immediately - made available for another streaming or non-streaming pull - request. If the value is < 0 (an error), the stream will be - aborted with status ``INVALID_ARGUMENT``. - modify_deadline_ack_ids: - List of acknowledgement IDs whose deadline will be modified - based on the corresponding element in - ``modify_deadline_seconds``. This field can be used to - indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if - the processing was interrupted. - stream_ack_deadline_seconds: - Required. The ack deadline to use for the stream. This must be - provided in the first request on the stream, but it can also - be updated on subsequent requests from client to server. The - minimum deadline you can specify is 10 seconds. The maximum - deadline you can specify is 600 seconds (10 minutes). - client_id: - A unique identifier that is used to distinguish client - instances from each other. Only needs to be provided on the - initial request. When a stream disconnects and reconnects for - the same stream, the client_id should be set to the same value - so that state associated with the old stream can be - transferred to the new stream. The same client_id should not - be used for different client instances. - max_outstanding_messages: - Flow control settings for the maximum number of outstanding - messages. When there are ``max_outstanding_messages`` or more - currently sent to the streaming pull client that have not yet - been acked or nacked, the server stops sending more messages. - The sending of messages resumes once the number of outstanding - messages is less than this value. If the value is <= 0, there - is no limit to the number of outstanding messages. This - property can only be set on the initial StreamingPullRequest. - If it is set on a subsequent request, the stream will be - aborted with status ``INVALID_ARGUMENT``. - max_outstanding_bytes: - Flow control settings for the maximum number of outstanding - bytes. 
When there are ``max_outstanding_bytes`` or more worth - of messages currently sent to the streaming pull client that - have not yet been acked or nacked, the server will stop - sending more messages. The sending of messages resumes once - the number of outstanding bytes is less than this value. If - the value is <= 0, there is no limit to the number of - outstanding bytes. This property can only be set on the - initial StreamingPullRequest. If it is set on a subsequent - request, the stream will be aborted with status - ``INVALID_ARGUMENT``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) - }, -) -_sym_db.RegisterMessage(StreamingPullRequest) - -StreamingPullResponse = _reflection.GeneratedProtocolMessageType( - "StreamingPullResponse", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGPULLRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``StreamingPull`` method. This response is used to - stream messages from the server to the client. - - - Attributes: - received_messages: - Received Pub/Sub messages. This will not be empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) - }, -) -_sym_db.RegisterMessage(StreamingPullResponse) - -CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "CreateSnapshotRequest", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _CREATESNAPSHOTREQUEST_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest.LabelsEntry) - }, - ), - "DESCRIPTOR": _CREATESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``CreateSnapshot`` method. - - - Attributes: - name: - Required. User-provided name for this snapshot. If the name is - not provided in the request, the server will assign a random - name for this snapshot on the same project as the - subscription. Note that for REST API requests, you must - specify a name. See the resource name rules. Format is - ``projects/{project}/snapshots/{snap}``. - subscription: - Required. The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: - (a) The existing backlog on the subscription. More precisely, - this is defined as the messages in the subscription’s backlog - that are unacknowledged upon the successful completion of the - ``CreateSnapshot`` request; as well as: (b) Any messages - published to the subscription’s topic following the successful - completion of the CreateSnapshot request. Format is - ``projects/{project}/subscriptions/{sub}``. - labels: - See Creating and managing labels. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) - }, -) -_sym_db.RegisterMessage(CreateSnapshotRequest) -_sym_db.RegisterMessage(CreateSnapshotRequest.LabelsEntry) - -UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateSnapshot method. - - - Attributes: - snapshot: - Required. The updated snapshot object. - update_mask: - Required. Indicates which fields in the provided snapshot to - update. Must be specified and non-empty. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) - }, -) -_sym_db.RegisterMessage(UpdateSnapshotRequest) - -Snapshot = _reflection.GeneratedProtocolMessageType( - "Snapshot", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SNAPSHOT_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) - }, - ), - "DESCRIPTOR": _SNAPSHOT, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A snapshot resource. Snapshots are used in Seek operations, which - allow you to manage message acknowledgments in bulk. That is, you can - set the acknowledgment state of messages in an existing subscription - to the state captured by a snapshot. - - - Attributes: - name: - The name of the snapshot. - topic: - The name of the topic from which this snapshot is retaining - messages. - expire_time: - The snapshot is guaranteed to exist up until this time. A - newly-created snapshot expires no later than 7 days from the - time of its creation. Its exact lifetime is determined at - creation by the existing backlog in the source subscription. - Specifically, the lifetime of the snapshot is ``7 days - (age - of oldest unacked message in the subscription)``. For example, - consider a subscription whose oldest unacked message is 3 days - old. If a snapshot is created from this subscription, the - snapshot – which will always capture this 3-day-old backlog as - long as the snapshot exists – will expire in 4 days. The - service will refuse to create a snapshot that would expire in - less than 1 hour after creation. - labels: - See Creating and managing labels. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) - }, -) -_sym_db.RegisterMessage(Snapshot) -_sym_db.RegisterMessage(Snapshot.LabelsEntry) - -GetSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "GetSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETSNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetSnapshot method. - - - Attributes: - snapshot: - Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) - }, -) -_sym_db.RegisterMessage(GetSnapshotRequest) - -ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType( - "ListSnapshotsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTSNAPSHOTSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListSnapshots`` method. - - - Attributes: - project: - Required. The name of the project in which to list snapshots. - Format is ``projects/{project-id}``. - page_size: - Maximum number of snapshots to return. - page_token: - The value returned by the last ``ListSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListSnapshots`` call, and that the system should return the - next page of data. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) - }, -) -_sym_db.RegisterMessage(ListSnapshotsRequest) - -ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType( - "ListSnapshotsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTSNAPSHOTSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListSnapshots`` method. - - - Attributes: - snapshots: - The resulting snapshots. - next_page_token: - If not empty, indicates that there may be more snapshot that - match the request; this value should be passed in a new - ``ListSnapshotsRequest``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) - }, -) -_sym_db.RegisterMessage(ListSnapshotsResponse) - -DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``DeleteSnapshot`` method. - - - Attributes: - snapshot: - Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) - }, -) -_sym_db.RegisterMessage(DeleteSnapshotRequest) - -SeekRequest = _reflection.GeneratedProtocolMessageType( - "SeekRequest", - (_message.Message,), - { - "DESCRIPTOR": _SEEKREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``Seek`` method. - - - Attributes: - subscription: - Required. The subscription to affect. - time: - The time to seek to. Messages retained in the subscription - that were published before this time are marked as - acknowledged, and messages retained in the subscription that - were published after this time are marked as unacknowledged. - Note that this operation affects only those messages retained - in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). - For example, if ``time`` corresponds to a point before the - message retention window (or to a point before the system’s - notion of the subscription creation time), only retained - messages will be marked as unacknowledged, and already- - expunged messages will not be restored. - snapshot: - The snapshot to seek to. The snapshot’s topic must be the same - as that of the provided subscription. Format is - ``projects/{project}/snapshots/{snap}``. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) - }, -) -_sym_db.RegisterMessage(SeekRequest) - -SeekResponse = _reflection.GeneratedProtocolMessageType( - "SeekResponse", - (_message.Message,), - { - "DESCRIPTOR": _SEEKRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Seek`` method (this response is empty).""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) - }, -) -_sym_db.RegisterMessage(SeekResponse) - - -DESCRIPTOR._options = None -_TOPIC_LABELSENTRY._options = None -_TOPIC.fields_by_name["name"]._options = None -_TOPIC._options = None -_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = None -_GETTOPICREQUEST.fields_by_name["topic"]._options = None -_UPDATETOPICREQUEST.fields_by_name["topic"]._options = None -_UPDATETOPICREQUEST.fields_by_name["update_mask"]._options = None -_PUBLISHREQUEST.fields_by_name["topic"]._options = None -_PUBLISHREQUEST.fields_by_name["messages"]._options = None -_LISTTOPICSREQUEST.fields_by_name["project"]._options = None -_LISTTOPICSUBSCRIPTIONSREQUEST.fields_by_name["topic"]._options = None -_LISTTOPICSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"]._options = None -_LISTTOPICSNAPSHOTSREQUEST.fields_by_name["topic"]._options = None -_DELETETOPICREQUEST.fields_by_name["topic"]._options = None -_DETACHSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_SUBSCRIPTION_LABELSENTRY._options = None -_SUBSCRIPTION.fields_by_name["name"]._options = None -_SUBSCRIPTION.fields_by_name["topic"]._options = None -_SUBSCRIPTION._options = None -_PUSHCONFIG_ATTRIBUTESENTRY._options = None -_GETSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["update_mask"]._options = None -_LISTSUBSCRIPTIONSREQUEST.fields_by_name["project"]._options = None -_DELETESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_MODIFYPUSHCONFIGREQUEST.fields_by_name["subscription"]._options = None -_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"]._options = None -_PULLREQUEST.fields_by_name["subscription"]._options = None -_PULLREQUEST.fields_by_name["return_immediately"]._options = None -_PULLREQUEST.fields_by_name["max_messages"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["subscription"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_ids"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_deadline_seconds"]._options = None -_ACKNOWLEDGEREQUEST.fields_by_name["subscription"]._options = None -_ACKNOWLEDGEREQUEST.fields_by_name["ack_ids"]._options = None -_STREAMINGPULLREQUEST.fields_by_name["subscription"]._options = None -_STREAMINGPULLREQUEST.fields_by_name["stream_ack_deadline_seconds"]._options = None -_CREATESNAPSHOTREQUEST_LABELSENTRY._options = None -_CREATESNAPSHOTREQUEST.fields_by_name["name"]._options = None -_CREATESNAPSHOTREQUEST.fields_by_name["subscription"]._options = None -_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_UPDATESNAPSHOTREQUEST.fields_by_name["update_mask"]._options = None -_SNAPSHOT_LABELSENTRY._options = None -_SNAPSHOT.fields_by_name["topic"]._options = None -_SNAPSHOT._options = None -_GETSNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_LISTSNAPSHOTSREQUEST.fields_by_name["project"]._options = None -_DELETESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_SEEKREQUEST.fields_by_name["subscription"]._options 
= None -_SEEKREQUEST.fields_by_name["snapshot"]._options = None - -_PUBLISHER = _descriptor.ServiceDescriptor( - name="Publisher", - full_name="google.pubsub.v1.Publisher", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", - create_key=_descriptor._internal_create_key, - serialized_start=6417, - serialized_end=7860, - methods=[ - _descriptor.MethodDescriptor( - name="CreateTopic", - full_name="google.pubsub.v1.Publisher.CreateTopic", - index=0, - containing_service=None, - input_type=_TOPIC, - output_type=_TOPIC, - serialized_options=b"\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateTopic", - full_name="google.pubsub.v1.Publisher.UpdateTopic", - index=1, - containing_service=None, - input_type=_UPDATETOPICREQUEST, - output_type=_TOPIC, - serialized_options=b"\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Publish", - full_name="google.pubsub.v1.Publisher.Publish", - index=2, - containing_service=None, - input_type=_PUBLISHREQUEST, - output_type=_PUBLISHRESPONSE, - serialized_options=b"\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*\332A\016topic,messages", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetTopic", - full_name="google.pubsub.v1.Publisher.GetTopic", - index=3, - containing_service=None, - input_type=_GETTOPICREQUEST, - output_type=_TOPIC, - serialized_options=b"\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopics", - full_name="google.pubsub.v1.Publisher.ListTopics", - index=4, - containing_service=None, - input_type=_LISTTOPICSREQUEST, - output_type=_LISTTOPICSRESPONSE, - serialized_options=b"\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics\332A\007project", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopicSubscriptions", - full_name="google.pubsub.v1.Publisher.ListTopicSubscriptions", - index=5, - containing_service=None, - input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, - output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, - serialized_options=b"\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopicSnapshots", - full_name="google.pubsub.v1.Publisher.ListTopicSnapshots", - index=6, - containing_service=None, - input_type=_LISTTOPICSNAPSHOTSREQUEST, - output_type=_LISTTOPICSNAPSHOTSRESPONSE, - serialized_options=b"\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTopic", - full_name="google.pubsub.v1.Publisher.DeleteTopic", - index=7, - containing_service=None, - input_type=_DELETETOPICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DetachSubscription", - 
full_name="google.pubsub.v1.Publisher.DetachSubscription", - index=8, - containing_service=None, - input_type=_DETACHSUBSCRIPTIONREQUEST, - output_type=_DETACHSUBSCRIPTIONRESPONSE, - serialized_options=b'\202\323\344\223\0026"4/v1/{subscription=projects/*/subscriptions/*}:detach', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_PUBLISHER) - -DESCRIPTOR.services_by_name["Publisher"] = _PUBLISHER - - -_SUBSCRIBER = _descriptor.ServiceDescriptor( - name="Subscriber", - full_name="google.pubsub.v1.Subscriber", - file=DESCRIPTOR, - index=1, - serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", - create_key=_descriptor._internal_create_key, - serialized_start=7863, - serialized_end=10554, - methods=[ - _descriptor.MethodDescriptor( - name="CreateSubscription", - full_name="google.pubsub.v1.Subscriber.CreateSubscription", - index=0, - containing_service=None, - input_type=_SUBSCRIPTION, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*\332A+name,topic,push_config,ack_deadline_seconds", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetSubscription", - full_name="google.pubsub.v1.Subscriber.GetSubscription", - index=1, - containing_service=None, - input_type=_GETSUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateSubscription", - full_name="google.pubsub.v1.Subscriber.UpdateSubscription", - index=2, - containing_service=None, - input_type=_UPDATESUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListSubscriptions", - full_name="google.pubsub.v1.Subscriber.ListSubscriptions", - index=3, - containing_service=None, - input_type=_LISTSUBSCRIPTIONSREQUEST, - output_type=_LISTSUBSCRIPTIONSRESPONSE, - serialized_options=b"\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions\332A\007project", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteSubscription", - full_name="google.pubsub.v1.Subscriber.DeleteSubscription", - index=4, - containing_service=None, - input_type=_DELETESUBSCRIPTIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ModifyAckDeadline", - full_name="google.pubsub.v1.Subscriber.ModifyAckDeadline", - index=5, - containing_service=None, - input_type=_MODIFYACKDEADLINEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*\332A)subscription,ack_ids,ack_deadline_seconds', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Acknowledge", - full_name="google.pubsub.v1.Subscriber.Acknowledge", - index=6, - containing_service=None, - input_type=_ACKNOWLEDGEREQUEST, - 
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*\332A\024subscription,ack_ids', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Pull", - full_name="google.pubsub.v1.Subscriber.Pull", - index=7, - containing_service=None, - input_type=_PULLREQUEST, - output_type=_PULLRESPONSE, - serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*\332A,subscription,return_immediately,max_messages', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="StreamingPull", - full_name="google.pubsub.v1.Subscriber.StreamingPull", - index=8, - containing_service=None, - input_type=_STREAMINGPULLREQUEST, - output_type=_STREAMINGPULLRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ModifyPushConfig", - full_name="google.pubsub.v1.Subscriber.ModifyPushConfig", - index=9, - containing_service=None, - input_type=_MODIFYPUSHCONFIGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*\332A\030subscription,push_config', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetSnapshot", - full_name="google.pubsub.v1.Subscriber.GetSnapshot", - index=10, - containing_service=None, - input_type=_GETSNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListSnapshots", - full_name="google.pubsub.v1.Subscriber.ListSnapshots", - index=11, - containing_service=None, - input_type=_LISTSNAPSHOTSREQUEST, - output_type=_LISTSNAPSHOTSRESPONSE, - serialized_options=b'\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots\332A\007project', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateSnapshot", - full_name="google.pubsub.v1.Subscriber.CreateSnapshot", - index=12, - containing_service=None, - input_type=_CREATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*\332A\021name,subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateSnapshot", - full_name="google.pubsub.v1.Subscriber.UpdateSnapshot", - index=13, - containing_service=None, - input_type=_UPDATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteSnapshot", - full_name="google.pubsub.v1.Subscriber.DeleteSnapshot", - index=14, - containing_service=None, - input_type=_DELETESNAPSHOTREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Seek", - full_name="google.pubsub.v1.Subscriber.Seek", - index=15, - containing_service=None, - input_type=_SEEKREQUEST, - output_type=_SEEKRESPONSE, - 
serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_SUBSCRIBER) - -DESCRIPTOR.services_by_name["Subscriber"] = _SUBSCRIBER - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py deleted file mode 100644 index ca2cf7903..000000000 --- a/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ /dev/null @@ -1,1284 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.pubsub_v1.proto import ( - pubsub_pb2 as google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class PublisherStub(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/CreateTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.UpdateTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/UpdateTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.Publish = channel.unary_unary( - "/google.pubsub.v1.Publisher/Publish", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, - ) - self.GetTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/GetTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.ListTopics = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopics", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, - ) - self.ListTopicSubscriptions = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopicSubscriptions", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, - ) - self.ListTopicSnapshots = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopicSnapshots", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, - ) - self.DeleteTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/DeleteTopic", - 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.DetachSubscription = channel.unary_unary( - "/google.pubsub.v1.Publisher/DetachSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, - ) - - -class PublisherServicer(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def CreateTopic(self, request, context): - """Creates the given topic with the given name. See the - - resource name rules. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateTopic(self, request, context): - """Updates an existing topic. Note that certain properties of a - topic are not modifiable. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Publish(self, request, context): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTopic(self, request, context): - """Gets the configuration of a topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopics(self, request, context): - """Lists matching topics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopicSubscriptions(self, request, context): - """Lists the names of the attached subscriptions on this topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopicSnapshots(self, request, context): - """Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTopic(self, request, context): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DetachSubscription(self, request, context): - """Detaches a subscription from this topic. All messages retained in the - subscription are dropped. 
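These hand-written stubs followed the stock gRPC pattern: wrap a channel, then invoke methods with request protos. A sketch of how ``PublisherStub`` was driven directly, e.g. against the local Pub/Sub emulator (illustrative only, since this module is being deleted):

```python
import grpc

# Imports from the module deleted in this diff; illustrative only.
from google.cloud.pubsub_v1.proto import pubsub_pb2, pubsub_pb2_grpc

# The local emulator conventionally listens on 8085 and needs no auth.
channel = grpc.insecure_channel("localhost:8085")
stub = pubsub_pb2_grpc.PublisherStub(channel)

# CreateTopic takes a Topic and returns the created Topic.
topic = stub.CreateTopic(
    pubsub_pb2.Topic(name="projects/my-project/topics/my-topic")
)
print(topic.name)
```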
Subsequent `Pull` and `StreamingPull` requests - will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_PublisherServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateTopic": grpc.unary_unary_rpc_method_handler( - servicer.CreateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "UpdateTopic": grpc.unary_unary_rpc_method_handler( - servicer.UpdateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "Publish": grpc.unary_unary_rpc_method_handler( - servicer.Publish, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, - ), - "GetTopic": grpc.unary_unary_rpc_method_handler( - servicer.GetTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "ListTopics": grpc.unary_unary_rpc_method_handler( - servicer.ListTopics, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, - ), - "ListTopicSubscriptions": grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, - ), - "ListTopicSnapshots": grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.SerializeToString, - ), - "DeleteTopic": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "DetachSubscription": grpc.unary_unary_rpc_method_handler( - servicer.DetachSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.pubsub.v1.Publisher", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. 
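# A minimal sketch (editor's annotation, not part of the patch) of how the
# add_PublisherServicer_to_server() helper above was typically used, e.g. to
# host a fake Publisher in tests; the servicer subclass and port are assumed:
#
#     import grpc
#     from concurrent import futures
#     from google.cloud.pubsub_v1.proto import pubsub_pb2
#
#     class FakePublisher(PublisherServicer):
#         def GetTopic(self, request, context):
#             # Echo back a Topic for whatever name was requested.
#             return pubsub_pb2.Topic(name=request.topic)
#
#     server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
#     add_PublisherServicer_to_server(FakePublisher(), server)
#     server.add_insecure_port("[::]:8085")
#     server.start()
#
# The EXPERIMENTAL note above refers to the one-shot `Publisher` class that
# follows.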
-class Publisher(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - @staticmethod - def CreateTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/CreateTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/UpdateTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Publish( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/Publish", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/GetTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopics( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopics", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopicSubscriptions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopicSubscriptions", - 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopicSnapshots( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopicSnapshots", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/DeleteTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DetachSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/DetachSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - -class SubscriberStub(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
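        Example (editor's sketch, not part of the patch; the endpoint and
        resource names are assumptions, e.g. a local Pub/Sub emulator):

            channel = grpc.insecure_channel("localhost:8085")
            stub = SubscriberStub(channel)
            response = stub.Pull(
                pubsub_pb2.PullRequest(
                    subscription="projects/my-project/subscriptions/my-sub",
                    max_messages=10,
                )
            )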
- """ - self.CreateSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/CreateSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.GetSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/GetSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.UpdateSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/UpdateSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.ListSubscriptions = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ListSubscriptions", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, - ) - self.DeleteSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/DeleteSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ModifyAckDeadline = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ModifyAckDeadline", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Acknowledge = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Acknowledge", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Pull = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Pull", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, - ) - self.StreamingPull = channel.stream_stream( - "/google.pubsub.v1.Subscriber/StreamingPull", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, - ) - self.ModifyPushConfig = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ModifyPushConfig", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/GetSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.ListSnapshots = channel.unary_unary( - 
"/google.pubsub.v1.Subscriber/ListSnapshots", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, - ) - self.CreateSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/CreateSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.UpdateSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/UpdateSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.DeleteSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/DeleteSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Seek = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Seek", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, - ) - - -class SubscriberServicer(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ - - def CreateSubscription(self, request, context): - """Creates a subscription to a given topic. See the - - resource name rules. - If the subscription already exists, returns `ALREADY_EXISTS`. - If the corresponding topic doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. Note that - for REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSubscription(self, request, context): - """Gets the configuration details of a subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSubscription(self, request, context): - """Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListSubscriptions(self, request, context): - """Lists matching subscriptions. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSubscription(self, request, context): - """Deletes an existing subscription. 
All messages retained in the subscription - are immediately dropped. Calls to `Pull` after deletion will return - `NOT_FOUND`. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ModifyAckDeadline(self, request, context): - """Modifies the ack deadline for a specific message. This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level `ackDeadlineSeconds` used for subsequent messages. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Acknowledge(self, request, context): - """Acknowledges the messages associated with the `ack_ids` in the - `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages - from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Pull(self, request, context): - """Pulls messages from the server. The server may return `UNAVAILABLE` if - there are too many concurrent pull requests pending for the given - subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def StreamingPull(self, request_iterator, context): - """Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `UNAVAILABLE` to - reassign server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by configuring the - underlying RPC channel. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ModifyPushConfig(self, request, context): - """Modifies the `PushConfig` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified by - an empty `PushConfig`) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the `PushConfig`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSnapshot(self, request, context): - """Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListSnapshots(self, request, context): - """Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. -

If the snapshot already exists, returns `ALREADY_EXISTS`. - If the requested subscription doesn't exist, returns `NOT_FOUND`. - If the backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. - See also the `Snapshot.expire_time` field. If the name is not provided in - the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. Note that for - REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSnapshot(self, request, context): - """Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSnapshot(self, request, context): - """Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Seek(self, request, context): - """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_SubscriberServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateSubscription": grpc.unary_unary_rpc_method_handler( - servicer.CreateSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "GetSubscription": grpc.unary_unary_rpc_method_handler( - servicer.GetSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "UpdateSubscription": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "ListSubscriptions": grpc.unary_unary_rpc_method_handler( - servicer.ListSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, - ), - "DeleteSubscription": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ModifyAckDeadline": grpc.unary_unary_rpc_method_handler( - servicer.ModifyAckDeadline, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Acknowledge": grpc.unary_unary_rpc_method_handler( - servicer.Acknowledge, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Pull": grpc.unary_unary_rpc_method_handler( - servicer.Pull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.FromString, - 
response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.SerializeToString, - ), - "StreamingPull": grpc.stream_stream_rpc_method_handler( - servicer.StreamingPull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, - ), - "ModifyPushConfig": grpc.unary_unary_rpc_method_handler( - servicer.ModifyPushConfig, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.GetSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "ListSnapshots": grpc.unary_unary_rpc_method_handler( - servicer.ListSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, - ), - "CreateSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.CreateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "UpdateSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "DeleteSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Seek": grpc.unary_unary_rpc_method_handler( - servicer.Seek, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.pubsub.v1.Subscriber", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class Subscriber(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. 
- """ - - @staticmethod - def CreateSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/CreateSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/GetSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/UpdateSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListSubscriptions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ListSubscriptions", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/DeleteSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ModifyAckDeadline( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ModifyAckDeadline", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, - 
google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Acknowledge( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Acknowledge", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Pull( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Pull", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def StreamingPull( - request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.stream_stream( - request_iterator, - target, - "/google.pubsub.v1.Subscriber/StreamingPull", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ModifyPushConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ModifyPushConfig", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/GetSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListSnapshots( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ListSnapshots", - 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/CreateSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/UpdateSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/DeleteSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Seek( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Seek", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/google/cloud/pubsub_v1/proto/schema.proto b/google/cloud/pubsub_v1/proto/schema.proto new file mode 100644 index 000000000..1ace7ef3b --- /dev/null +++ b/google/cloud/pubsub_v1/proto/schema.proto @@ -0,0 +1,286 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.pubsub.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/empty.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.PubSub.V1"; +option go_package = "google.golang.org/genproto/googleapis/pubsub/v1;pubsub"; +option java_multiple_files = true; +option java_outer_classname = "SchemaProto"; +option java_package = "com.google.pubsub.v1"; +option php_namespace = "Google\\Cloud\\PubSub\\V1"; +option ruby_package = "Google::Cloud::PubSub::V1"; + +// Service for doing schema-related operations. +service SchemaService { + option (google.api.default_host) = "pubsub.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/pubsub"; + + // Creates a schema. + rpc CreateSchema(CreateSchemaRequest) returns (Schema) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/schemas" + body: "schema" + }; + option (google.api.method_signature) = "parent,schema,schema_id"; + } + + // Gets a schema. + rpc GetSchema(GetSchemaRequest) returns (Schema) { + option (google.api.http) = { + get: "/v1/{name=projects/*/schemas/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists schemas in a project. + rpc ListSchemas(ListSchemasRequest) returns (ListSchemasResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*}/schemas" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a schema. + rpc DeleteSchema(DeleteSchemaRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/schemas/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Validates a schema. + rpc ValidateSchema(ValidateSchemaRequest) returns (ValidateSchemaResponse) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/schemas:validate" + body: "*" + }; + option (google.api.method_signature) = "parent,schema"; + } + + // Validates a message against a schema. + rpc ValidateMessage(ValidateMessageRequest) + returns (ValidateMessageResponse) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/schemas:validateMessage" + body: "*" + }; + } +} + +// A schema resource. +message Schema { + option (google.api.resource) = { + type: "pubsub.googleapis.com/Schema" + pattern: "projects/{project}/schemas/{schema}" + }; + + // Possible schema definition types. + enum Type { + // Default value. This value is unused. + TYPE_UNSPECIFIED = 0; + + // A Protocol Buffer schema definition. + PROTOCOL_BUFFER = 1; + + // An Avro schema definition. + AVRO = 2; + } + + // Required. Name of the schema. + // Format is `projects/{project}/schemas/{schema}`. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // The type of the schema definition. + Type type = 2; + + // The definition of the schema. This should contain a string representing + // the full definition of the schema that is a valid schema definition of + // the type specified in `type`. + string definition = 3; +} + +// Request for the CreateSchema method. +message CreateSchemaRequest { + // Required. The name of the project in which to create the schema. + // Format is `projects/{project-id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "pubsub.googleapis.com/Schema" + } + ]; + + // Required. 
The schema object to create. + // + // This schema's `name` parameter is ignored. The schema object returned + // by CreateSchema will have a `name` made using the given `parent` and + // `schema_id`. + Schema schema = 2 [(google.api.field_behavior) = REQUIRED]; + + // The ID to use for the schema, which will become the final component of + // the schema's resource name. + // + // See https://cloud.google.com/pubsub/docs/admin#resource_names for resource + // name constraints. + string schema_id = 3; +} + +// View of Schema object fields to be returned by GetSchema and ListSchemas. +enum SchemaView { + // The default / unset value. + // The API will default to the BASIC view. + SCHEMA_VIEW_UNSPECIFIED = 0; + + // Include the name and type of the schema, but not the definition. + BASIC = 1; + + // Include all Schema object fields. + FULL = 2; +} + +// Request for the GetSchema method. +message GetSchemaRequest { + // Required. The name of the schema to get. + // Format is `projects/{project}/schemas/{schema}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; + + // The set of fields to return in the response. If not set, returns a Schema + // with `name` and `type`, but not `definition`. Set to `FULL` to retrieve all + // fields. + SchemaView view = 2; +} + +// Request for the `ListSchemas` method. +message ListSchemasRequest { + // Required. The name of the project in which to list schemas. + // Format is `projects/{project-id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // The set of Schema fields to return in the response. If not set, returns + // Schemas with `name` and `type`, but not `definition`. Set to `FULL` to + // retrieve all fields. + SchemaView view = 2; + + // Maximum number of schemas to return. + int32 page_size = 3; + + // The value returned by the last `ListSchemasResponse`; indicates that + // this is a continuation of a prior `ListSchemas` call, and that the + // system should return the next page of data. + string page_token = 4; +} + +// Response for the `ListSchemas` method. +message ListSchemasResponse { + // The resulting schemas. + repeated Schema schemas = 1; + + // If not empty, indicates that there may be more schemas that match the + // request; this value should be passed in a new `ListSchemasRequest`. + string next_page_token = 2; +} + +// Request for the `DeleteSchema` method. +message DeleteSchemaRequest { + // Required. Name of the schema to delete. + // Format is `projects/{project}/schemas/{schema}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; +} + +// Request for the `ValidateSchema` method. +message ValidateSchemaRequest { + // Required. The name of the project in which to validate schemas. + // Format is `projects/{project-id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // Required. The schema object to validate. + Schema schema = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Response for the `ValidateSchema` method. +message ValidateSchemaResponse {} + +// Request for the `ValidateMessage` method. +message ValidateMessageRequest { + // Required. The name of the project in which to validate schemas. 
+ // Format is `projects/{project-id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + oneof schema_spec { + // Name of the schema against which to validate. + // + // Format is `projects/{project}/schemas/{schema}`. + string name = 2 [ + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; + + // Ad-hoc schema against which to validate + Schema schema = 3; + } + + // Message to validate against the provided `schema_spec`. + bytes message = 4; + + // The encoding expected for messages + Encoding encoding = 5; +} + +// Response for the `ValidateMessage` method. +message ValidateMessageResponse {} + +// Possible encoding types for messages. +enum Encoding { + // Unspecified + ENCODING_UNSPECIFIED = 0; + + // JSON encoding + JSON = 1; + + // Binary encoding, as defined by the schema type. For some schema types, + // binary encoding may not be available. + BINARY = 2; +} diff --git a/google/cloud/pubsub_v1/publisher/_batch/base.py b/google/cloud/pubsub_v1/publisher/_batch/base.py index 212a4b277..c91e0a444 100644 --- a/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -16,12 +16,21 @@ import abc import enum +import typing +from typing import Optional, Sequence -import six +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) -@six.add_metaclass(abc.ABCMeta) -class Batch(object): +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1 import types + from google.pubsub_v1 import types as gapic_types + + +class Batch(metaclass=abc.ABCMeta): """The base batching class for Pub/Sub publishing. Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class, based @@ -49,11 +58,11 @@ class Batch(object): def __len__(self): """Return the number of messages currently in the batch.""" - return len(self.messages) + return len(self.message_wrappers) @staticmethod @abc.abstractmethod - def make_lock(): + def make_lock(): # pragma: NO COVER """Return a lock in the chosen concurrency model. Returns: @@ -63,17 +72,17 @@ def make_lock(): @property @abc.abstractmethod - def messages(self): + def message_wrappers(self) -> Sequence[PublishMessageWrapper]: # pragma: NO COVER """Return the messages currently in the batch. Returns: - Sequence: The messages currently in the batch. + The messages currently in the batch. """ raise NotImplementedError @property @abc.abstractmethod - def size(self): + def size(self) -> int: # pragma: NO COVER """Return the total size of all of the messages currently in the batch. The size includes any overhead of the actual ``PublishRequest`` that is @@ -87,42 +96,45 @@ def size(self): @property @abc.abstractmethod - def settings(self): + def settings(self) -> "types.BatchSettings": # pragma: NO COVER """Return the batch settings. Returns: - ~.pubsub_v1.types.BatchSettings: The batch settings. These are - considered immutable once the batch has been opened. + The batch settings. These are considered immutable once the batch has + been opened. """ raise NotImplementedError @property @abc.abstractmethod - def status(self): + def status(self) -> "BatchStatus": # pragma: NO COVER """Return the status of this batch. Returns: - str: The status of this batch. All statuses are human-readable, - all-lowercase strings. 
The ones represented in the - :class:`BaseBatch.Status` enum are special, but other statuses - are permitted. + The status of this batch. All statuses are human-readable, all-lowercase + strings. The ones represented in the :class:`BaseBatch.Status` enum are + special, but other statuses are permitted. """ raise NotImplementedError - def cancel(self, cancellation_reason): + def cancel( + self, cancellation_reason: "BatchCancellationReason" + ) -> None: # pragma: NO COVER """Complete pending futures with an exception. This method must be called before publishing starts (ie: while the batch is still accepting messages.) Args: - cancellation_reason (BatchCancellationReason): The reason why this - batch has been cancelled. + cancellation_reason: + The reason why this batch has been cancelled. """ raise NotImplementedError @abc.abstractmethod - def publish(self, message): + def publish( + self, message: "gapic_types.PubsubMessage" + ) -> Optional["pubsub_v1.publisher.futures.Future"]: # pragma: NO COVER """Publish a single message. Add the given message to this object; this will cause it to be @@ -132,11 +144,12 @@ def publish(self, message): This method is called by :meth:`~.PublisherClient.publish`. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message: The Pub/Sub message. Returns: - ~google.api_core.future.Future: An object conforming to the - :class:`concurrent.futures.Future` interface. + An object conforming to the :class:`concurrent.futures.Future` interface. + If :data:`None` is returned, that signals that the batch cannot + accept a message. """ raise NotImplementedError diff --git a/google/cloud/pubsub_v1/publisher/_batch/thread.py b/google/cloud/pubsub_v1/publisher/_batch/thread.py index 67c9f2de3..2afbe3761 100644 --- a/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -17,20 +17,35 @@ import logging import threading import time +import typing +from typing import Any, Callable, List, Optional, Sequence +from datetime import datetime -import six - +from opentelemetry import trace import google.api_core.exceptions -from google.cloud.pubsub_v1 import types +from google.api_core import gapic_v1 +from google.auth import exceptions as auth_exceptions + from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import base +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1 import types + from google.cloud.pubsub_v1.publisher import Client as PublisherClient + from google.pubsub_v1.services.publisher.client import OptionalRetry _LOGGER = logging.getLogger(__name__) _CAN_COMMIT = (base.BatchStatus.ACCEPTING_MESSAGES, base.BatchStatus.STARTING) _SERVER_PUBLISH_MAX_BYTES = 10 * 1000 * 1000 # max accepted size of PublishRequest +_raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() + class Batch(base.Batch): """A batch of messages. @@ -55,24 +70,40 @@ class Batch(base.Batch): implementation details. Args: - client (~.pubsub_v1.PublisherClient): The publisher client used to - create this batch. - topic (str): The topic. The format for this is - ``projects/{project}/topics/{topic}``. - settings (~.pubsub_v1.types.BatchSettings): The settings for batch - publishing. 
These should be considered immutable once the batch
-            has been opened.
-        batch_done_callback (Callable[[bool], Any]): Callback called when the
-            response for a batch publish has been received. Called with one
-            boolean argument: successfully published or a permanent error
-            occurred. Temporary errors are not surfaced because they are retried
+        client:
+            The publisher client used to create this batch.
+        topic:
+            The topic. The format for this is ``projects/{project}/topics/{topic}``.
+        settings:
+            The settings for batch publishing. These should be considered immutable
+            once the batch has been opened.
+        batch_done_callback:
+            Callback called when the response for a batch publish has been received.
+            Called with one boolean argument: successfully published or a permanent
+            error occurred. Temporary errors are not surfaced because they are retried
             at a lower level.
-        commit_when_full (bool): Whether to commit the batch when the batch
-            is full.
+        commit_when_full:
+            Whether to commit the batch when the batch is full.
+        commit_retry:
+            Designation of what errors, if any, should be retried when committing
+            the batch. If not provided, a default retry is used.
+        commit_timeout:
+            The timeout to apply when committing the batch. If not provided, a default
+            timeout is used.
     """

+    _OPEN_TELEMETRY_TRACER_NAME: str = "google.cloud.pubsub_v1"
+    _OPEN_TELEMETRY_MESSAGING_SYSTEM: str = "gcp_pubsub"
+
     def __init__(
-        self, client, topic, settings, batch_done_callback=None, commit_when_full=True
+        self,
+        client: "PublisherClient",
+        topic: str,
+        settings: "types.BatchSettings",
+        batch_done_callback: Optional[Callable[[bool], Any]] = None,
+        commit_when_full: bool = True,
+        commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT,
+        commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT,
     ):
         self._client = client
         self._topic = topic
@@ -86,76 +117,82 @@ def __init__(
         # _futures list should remain unchanged after batch
         # status changed from ACCEPTING_MESSAGES to any other
         # in order to avoid race conditions
-        self._futures = []
-        self._messages = []
+        self._futures: List[futures.Future] = []
+        self._message_wrappers: List[PublishMessageWrapper] = []
         self._status = base.BatchStatus.ACCEPTING_MESSAGES

         # The initial size is not zero, we need to account for the size overhead
         # of the PublishRequest message itself.
-        self._base_request_size = types.PublishRequest(topic=topic).ByteSize()
+        self._base_request_size = gapic_types.PublishRequest(topic=topic)._pb.ByteSize()
         self._size = self._base_request_size

+        self._commit_retry = commit_retry
+        self._commit_timeout = commit_timeout
+
+        # Publish RPC Span that will be set by method `_start_publish_rpc_span`
+        # if Open Telemetry is enabled.
+        self._rpc_span: Optional[trace.Span] = None
+
     @staticmethod
-    def make_lock():
+    def make_lock() -> threading.Lock:
         """Return a threading lock.

         Returns:
-            _thread.Lock: A newly created lock.
+            A newly created lock.
         """
         return threading.Lock()

     @property
-    def client(self):
-        """~.pubsub_v1.client.PublisherClient: A publisher client."""
+    def client(self) -> "PublisherClient":
+        """A publisher client."""
         return self._client

     @property
-    def messages(self):
-        """Sequence: The messages currently in the batch."""
-        return self._messages
+    def message_wrappers(self) -> Sequence[PublishMessageWrapper]:
+        """The message wrappers currently in the batch."""
+        return self._message_wrappers

     @property
-    def settings(self):
+    def settings(self) -> "types.BatchSettings":
         """Return the batch settings.
Returns: - ~.pubsub_v1.types.BatchSettings: The batch settings. These are - considered immutable once the batch has been opened. + The batch settings. These are considered immutable once the batch has + been opened. """ return self._settings @property - def size(self): + def size(self) -> int: """Return the total size of all of the messages currently in the batch. The size includes any overhead of the actual ``PublishRequest`` that is sent to the backend. Returns: - int: The total size of all of the messages currently - in the batch (including the request overhead), in bytes. + The total size of all of the messages currently in the batch (including + the request overhead), in bytes. """ return self._size @property - def status(self): + def status(self) -> base.BatchStatus: """Return the status of this batch. Returns: - str: The status of this batch. All statuses are human-readable, - all-lowercase strings. + The status of this batch. All statuses are human-readable, all-lowercase + strings. """ return self._status - def cancel(self, cancellation_reason): + def cancel(self, cancellation_reason: base.BatchCancellationReason) -> None: """Complete pending futures with an exception. This method must be called before publishing starts (ie: while the batch is still accepting messages.) Args: - cancellation_reason (BatchCancellationReason): The reason why this - batch has been cancelled. + cancellation_reason: The reason why this batch has been cancelled. """ with self._state_lock: @@ -168,7 +205,7 @@ def cancel(self, cancellation_reason): future.set_exception(exc) self._status = base.BatchStatus.ERROR - def commit(self): + def commit(self) -> None: """Actually publish all of the messages on the active batch. .. note:: @@ -193,15 +230,49 @@ def commit(self): self._start_commit_thread() - def _start_commit_thread(self): + def _start_commit_thread(self) -> None: """Start a new thread to actually handle the commit.""" - + # NOTE: If the thread is *not* a daemon, a memory leak exists due to a CPython issue. + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-830092418 commit_thread = threading.Thread( - name="Thread-CommitBatchPublisher", target=self._commit + name="Thread-CommitBatchPublisher", target=self._commit, daemon=True ) commit_thread.start() - def _commit(self): + def _start_publish_rpc_span(self) -> None: + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + links = [] + + for wrapper in self._message_wrappers: + span = wrapper.create_span + # Add links only for sampled spans. 
+ if span.get_span_context().trace_flags.sampled: + links.append(trace.Link(span.get_span_context())) + assert len(self._topic.split("/")) == 4 + topic_short_name = self._topic.split("/")[3] + with tracer.start_as_current_span( + name=f"{topic_short_name} publish", + attributes={ + "messaging.system": self._OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.destination.name": topic_short_name, + "gcp.project_id": self._topic.split("/")[1], + "messaging.batch.message_count": len(self._message_wrappers), + "messaging.operation": "publish", + "code.function": "_commit", + }, + links=links, + kind=trace.SpanKind.CLIENT, + end_on_exit=False, + ) as rpc_span: + ctx = rpc_span.get_span_context() + for wrapper in self._message_wrappers: + span = wrapper.create_span + if span.get_span_context().trace_flags.sampled: + span.add_link(ctx) + self._rpc_span = rpc_span + + def _commit(self) -> None: """Actually publish all of the messages on the active batch. This moves the batch out from being the active batch to an in progress @@ -234,7 +305,7 @@ def _commit(self): # https://github.com/googleapis/google-cloud-python/issues/8036 # Sanity check: If there are no messages, no-op. - if not self._messages: + if not self._message_wrappers: _LOGGER.debug("No messages to publish, exiting commit") self._status = base.BatchStatus.SUCCESS return @@ -245,23 +316,62 @@ def _commit(self): batch_transport_succeeded = True try: - # Performs retries for errors defined in retry_codes.publish in the - # publisher_client_config.py file. - response = self._client.api.publish(self._topic, self._messages) - except google.api_core.exceptions.GoogleAPIError as exc: + if self._client.open_telemetry_enabled: + self._start_publish_rpc_span() + + # Performs retries for errors defined by the retry configuration. + response = self._client._gapic_publish( + topic=self._topic, + messages=[wrapper.message for wrapper in self._message_wrappers], + retry=self._commit_retry, + timeout=self._commit_timeout, + ) + + if self._client.open_telemetry_enabled: + assert self._rpc_span is not None + self._rpc_span.end() + end_time = str(datetime.now()) + for message_id, wrapper in zip( + response.message_ids, self._message_wrappers + ): + span = wrapper.create_span + span.add_event( + name="publish end", + attributes={ + "timestamp": end_time, + }, + ) + span.set_attribute(key="messaging.message.id", value=message_id) + wrapper.end_create_span() + except ( + google.api_core.exceptions.GoogleAPIError, + auth_exceptions.TransportError, + ) as exc: # We failed to publish, even after retries, so set the exception on # all futures and exit. self._status = base.BatchStatus.ERROR - for future in self._futures: - future.set_exception(exc) + if self._client.open_telemetry_enabled: + if self._rpc_span: + self._rpc_span.record_exception( + exception=exc, + ) + self._rpc_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._rpc_span.end() + + for wrapper in self._message_wrappers: + wrapper.end_create_span(exc=exc) batch_transport_succeeded = False if self._batch_done_callback is not None: # Failed to publish batch. self._batch_done_callback(batch_transport_succeeded) - _LOGGER.exception("Failed to publish %s messages.", len(self._futures)) + for future in self._futures: + future.set_exception(exc) + return end = time.time() @@ -272,8 +382,7 @@ def _commit(self): # IDs. We are trusting that there is a 1:1 mapping, and raise # an exception if not. 
self._status = base.BatchStatus.SUCCESS - zip_iter = six.moves.zip(response.message_ids, self._futures) - for message_id, future in zip_iter: + for message_id, future in zip(response.message_ids, self._futures): future.set_result(message_id) else: # Sanity check: If the number of message IDs is not equal to @@ -298,7 +407,10 @@ def _commit(self): if self._batch_done_callback is not None: self._batch_done_callback(batch_transport_succeeded) - def publish(self, message): + def publish( + self, + wrapper: PublishMessageWrapper, + ) -> Optional["pubsub_v1.publisher.futures.Future"]: """Publish a single message. Add the given message to this object; this will cause it to be @@ -309,13 +421,12 @@ def publish(self, message): This method is called by :meth:`~.PublisherClient.publish`. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + wrapper: The Pub/Sub message wrapper. Returns: - Optional[~google.api_core.future.Future]: An object conforming to - the :class:`~concurrent.futures.Future` interface or :data:`None`. - If :data:`None` is returned, that signals that the batch cannot - accept a message. + An object conforming to the :class:`~concurrent.futures.Future` interface + or :data:`None`. If :data:`None` is returned, that signals that the batch + cannot accept a message. Raises: pubsub_v1.publisher.exceptions.MessageTooLargeError: If publishing @@ -323,8 +434,14 @@ def publish(self, message): """ # Coerce the type, just in case. - if not isinstance(message, types.PubsubMessage): - message = types.PubsubMessage(**message) + if not isinstance( + wrapper.message, gapic_types.PubsubMessage + ): # pragma: NO COVER + # For performance reasons, the message should be constructed by directly + # using the raw protobuf class, and only then wrapping it into the + # higher-level PubsubMessage class. + vanilla_pb = _raw_proto_pubbsub_message(**wrapper.message) + wrapper.message = gapic_types.PubsubMessage.wrap(vanilla_pb) future = None @@ -334,9 +451,11 @@ def publish(self, message): ), "Publish after stop() or publish error." if self.status != base.BatchStatus.ACCEPTING_MESSAGES: - return + return None - size_increase = types.PublishRequest(messages=[message]).ByteSize() + size_increase = gapic_types.PublishRequest( + messages=[wrapper.message] + )._pb.ByteSize() if (self._base_request_size + size_increase) > _SERVER_PUBLISH_MAX_BYTES: err_msg = ( @@ -347,20 +466,19 @@ def publish(self, message): raise exceptions.MessageTooLargeError(err_msg) new_size = self._size + size_increase - new_count = len(self._messages) + 1 + new_count = len(self._message_wrappers) + 1 size_limit = min(self.settings.max_bytes, _SERVER_PUBLISH_MAX_BYTES) overflow = new_size > size_limit or new_count >= self.settings.max_messages - if not self._messages or not overflow: - + if not self._message_wrappers or not overflow: # Store the actual message in the batch's message queue. - self._messages.append(message) + self._message_wrappers.append(wrapper) self._size = new_size # Track the future on this batch (so that the result of the # future can be set). 
- future = futures.Future(completed=threading.Event()) + future = futures.Future() self._futures.append(future) # Try to commit, but it must be **without** the lock held, since @@ -370,5 +488,5 @@ def publish(self, message): return future - def _set_status(self, status): + def _set_status(self, status: base.BatchStatus): self._status = status diff --git a/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/google/cloud/pubsub_v1/publisher/_sequencer/base.py index fda5c1ee9..daaacaa33 100644 --- a/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -15,30 +15,33 @@ from __future__ import absolute_import import abc +import typing -import six +from google.api_core import gapic_v1 +from google.pubsub_v1 import types as gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + from concurrent import futures + from google.pubsub_v1.services.publisher.client import OptionalRetry -@six.add_metaclass(abc.ABCMeta) -class Sequencer(object): + +class Sequencer(metaclass=abc.ABCMeta): """The base class for sequencers for Pub/Sub publishing. A sequencer - sequences messages to be published. + sequences messages to be published. """ - @staticmethod @abc.abstractmethod - def is_finished(self): - """ Whether the sequencer is finished and should be cleaned up. + def is_finished(self) -> bool: # pragma: NO COVER + """Whether the sequencer is finished and should be cleaned up. - Returns: - bool: Whether the sequencer is finished and should be cleaned up. + Returns: + bool: Whether the sequencer is finished and should be cleaned up. """ raise NotImplementedError - @staticmethod @abc.abstractmethod - def unpause(self, message): - """ Unpauses this sequencer. + def unpause(self) -> None: # pragma: NO COVER + """Unpauses this sequencer. Raises: RuntimeError: @@ -46,18 +49,27 @@ def unpause(self, message): """ raise NotImplementedError - @staticmethod @abc.abstractmethod - def publish(self, message): - """ Publish message for this ordering key. + def publish( + self, + message: gapic_types.PubsubMessage, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, # type: ignore + timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, # type: ignore + ) -> "futures.Future": # pragma: NO COVER + """Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message: + The Pub/Sub message. + retry: + The retry settings to apply when publishing the message. + timeout: + The timeout to apply when publishing the message. Returns: A class instance that conforms to Python Standard library's - :class:`~concurrent.futures.Future` interface (but not an - instance of that class). The future might return immediately with a + :class:`~concurrent.futures.Future` interface. The future might return + immediately with a `pubsub_v1.publisher.exceptions.PublishToPausedOrderingKeyException` if the ordering key is paused. Otherwise, the future tracks the lifetime of the message publish. 
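The ``retry`` and ``timeout`` parameters threaded through the sequencer interface above surface on the public ``PublisherClient.publish`` call. A minimal sketch of a per-call override, assuming valid credentials (or a running emulator); the project and topic names are placeholders:

    from google.api_core import retry as retries
    from google.cloud import pubsub_v1

    client = pubsub_v1.PublisherClient()
    # "my-project" and "my-topic" are placeholder names.
    topic_path = client.topic_path("my-project", "my-topic")

    # A per-call retry/timeout overrides any values set through publisher_options.
    custom_retry = retries.Retry(initial=0.1, maximum=10.0, deadline=60.0)
    future = client.publish(topic_path, b"payload", retry=custom_retry, timeout=60.0)
    print(future.result())  # Blocks until the server returns the message ID.

The returned object conforms to the ``concurrent.futures.Future`` interface, so ``result()`` re-raises whatever exception ultimately failed the publish.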
diff --git a/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index d8ddb3f8f..9644a1fa2 100644 --- a/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -14,12 +14,24 @@ import enum import collections -import concurrent.futures as futures import threading +import typing +from typing import Deque, Iterable, Sequence +from google.api_core import gapic_v1 +from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import base as sequencer_base from google.cloud.pubsub_v1.publisher._batch import base as batch_base +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1 import types + from google.cloud.pubsub_v1.publisher import _batch + from google.cloud.pubsub_v1.publisher.client import Client as PublisherClient + from google.pubsub_v1.services.publisher.client import OptionalRetry class _OrderedSequencerStatus(str, enum.Enum): @@ -66,23 +78,24 @@ class _OrderedSequencerStatus(str, enum.Enum): class OrderedSequencer(sequencer_base.Sequencer): - """ Sequences messages into batches ordered by an ordering key for one topic. + """Sequences messages into batches ordered by an ordering key for one topic. - A sequencer always has at least one batch in it, unless paused or stopped. - When no batches remain, the |publishes_done_callback| is called so the - client can perform cleanup. + A sequencer always has at least one batch in it, unless paused or stopped. + When no batches remain, the |publishes_done_callback| is called so the + client can perform cleanup. - Public methods are thread-safe. + Public methods are thread-safe. - Args: - client (~.pubsub_v1.PublisherClient): The publisher client used to - create this sequencer. - topic (str): The topic. The format for this is - ``projects/{project}/topics/{topic}``. - ordering_key (str): The ordering key for this sequencer. + Args: + client: + The publisher client used to create this sequencer. + topic: + The topic. The format for this is ``projects/{project}/topics/{topic}``. + ordering_key: + The ordering key for this sequencer. """ - def __init__(self, client, topic, ordering_key): + def __init__(self, client: "PublisherClient", topic: str, ordering_key: str): self._client = client self._topic = topic self._ordering_key = ordering_key @@ -91,28 +104,28 @@ def __init__(self, client, topic, ordering_key): # Batches ordered from first (head/left) to last (right/tail). # Invariant: always has at least one batch after the first publish, # unless paused or stopped. - self._ordered_batches = collections.deque() + self._ordered_batches: Deque["_batch.thread.Batch"] = collections.deque() # See _OrderedSequencerStatus for valid state transitions. self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES - def is_finished(self): - """ Whether the sequencer is finished and should be cleaned up. + def is_finished(self) -> bool: + """Whether the sequencer is finished and should be cleaned up. - Returns: - bool: Whether the sequencer is finished and should be cleaned up. + Returns: + Whether the sequencer is finished and should be cleaned up. 
""" with self._state_lock: return self._state == _OrderedSequencerStatus.FINISHED - def stop(self): - """ Permanently stop this sequencer. + def stop(self) -> None: + """Permanently stop this sequencer. - This differs from pausing, which may be resumed. Immediately commits - the first batch and cancels the rest. + This differs from pausing, which may be resumed. Immediately commits + the first batch and cancels the rest. - Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ with self._state_lock: if self._state == _OrderedSequencerStatus.STOPPED: @@ -131,14 +144,14 @@ def stop(self): batch = self._ordered_batches.pop() batch.cancel(batch_base.BatchCancellationReason.CLIENT_STOPPED) - def commit(self): - """ Commit the first batch, if unpaused. + def commit(self) -> None: + """Commit the first batch, if unpaused. - If paused or no batches exist, this method does nothing. + If paused or no batches exist, this method does nothing. - Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ with self._state_lock: if self._state == _OrderedSequencerStatus.STOPPED: @@ -149,12 +162,12 @@ def commit(self): # operation is idempotent. self._ordered_batches[0].commit() - def _batch_done_callback(self, success): - """ Deal with completion of a batch. + def _batch_done_callback(self, success: bool) -> None: + """Deal with completion of a batch. - Called when a batch has finished publishing, with either a success - or a failure. (Temporary failures are retried infinitely when - ordering keys are enabled.) + Called when a batch has finished publishing, with either a success + or a failure. (Temporary failures are retried infinitely when + ordering keys are enabled.) """ ensure_cleanup_and_commit_timer_runs = False with self._state_lock: @@ -197,11 +210,11 @@ def _batch_done_callback(self, success): if ensure_cleanup_and_commit_timer_runs: self._client.ensure_cleanup_and_commit_timer_runs() - def _pause(self): - """ Pause this sequencer: set state to paused, cancel all batches, and - clear the list of ordered batches. + def _pause(self) -> None: + """Pause this sequencer: set state to paused, cancel all batches, and + clear the list of ordered batches. - _state_lock must be taken before calling this method. + _state_lock must be taken before calling this method. """ assert ( self._state != _OrderedSequencerStatus.FINISHED @@ -213,8 +226,8 @@ def _pause(self): ) self._ordered_batches.clear() - def unpause(self): - """ Unpause this sequencer. + def unpause(self) -> None: + """Unpause this sequencer. Raises: RuntimeError: @@ -225,9 +238,19 @@ def unpause(self): raise RuntimeError("Ordering key is not paused.") self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES - def _create_batch(self): - """ Create a new batch using the client's batch class and other stored + def _create_batch( + self, + commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, + ) -> "_batch.thread.Batch": + """Create a new batch using the client's batch class and other stored settings. + + Args: + commit_retry: + The retry settings to apply when publishing the batch. + commit_timeout: + The timeout to apply when publishing the batch. 
""" return self._client._batch_class( client=self._client, @@ -235,13 +258,25 @@ def _create_batch(self): settings=self._client.batch_settings, batch_done_callback=self._batch_done_callback, commit_when_full=False, + commit_retry=commit_retry, + commit_timeout=commit_timeout, ) - def publish(self, message): - """ Publish message for this ordering key. + def publish( + self, + wrapper: PublishMessageWrapper, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, + ) -> futures.Future: + """Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + wrapper: + The Pub/Sub message wrapper. + retry: + The retry settings to apply when publishing the message. + timeout: + The timeout to apply when publishing the message. Returns: A class instance that conforms to Python Standard library's @@ -257,12 +292,12 @@ def publish(self, message): """ with self._state_lock: if self._state == _OrderedSequencerStatus.PAUSED: - future = futures.Future() + errored_future = futures.Future() exception = exceptions.PublishToPausedOrderingKeyException( self._ordering_key ) - future.set_exception(exception) - return future + errored_future.set_exception(exception) + return errored_future # If waiting to be cleaned-up, convert to accepting messages to # prevent this sequencer from being cleaned-up only to have another @@ -278,26 +313,28 @@ def publish(self, message): ), "Publish is only allowed in accepting-messages state." if not self._ordered_batches: - new_batch = self._create_batch() + new_batch = self._create_batch( + commit_retry=retry, commit_timeout=timeout + ) self._ordered_batches.append(new_batch) batch = self._ordered_batches[-1] - future = batch.publish(message) + future = batch.publish(wrapper) while future is None: - batch = self._create_batch() + batch = self._create_batch(commit_retry=retry, commit_timeout=timeout) self._ordered_batches.append(batch) - future = batch.publish(message) + future = batch.publish(wrapper) return future # Used only for testing. - def _set_batch(self, batch): + def _set_batch(self, batch: "_batch.thread.Batch") -> None: self._ordered_batches = collections.deque([batch]) # Used only for testing. - def _set_batches(self, batches): + def _set_batches(self, batches: Iterable["_batch.thread.Batch"]) -> None: self._ordered_batches = collections.deque(batches) # Used only for testing. - def _get_batches(self): + def _get_batches(self) -> Sequence["_batch.thread.Batch"]: return self._ordered_batches diff --git a/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index dff114630..7dbd3f084 100644 --- a/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -12,26 +12,42 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import typing +from typing import Optional + +from google.api_core import gapic_v1 + from google.cloud.pubsub_v1.publisher._sequencer import base +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.publisher import _batch + from google.cloud.pubsub_v1.publisher import futures + from google.cloud.pubsub_v1.publisher.client import Client as PublisherClient + from google.pubsub_v1.services.publisher.client import OptionalRetry + + from google.cloud.pubsub_v1 import types class UnorderedSequencer(base.Sequencer): - """ Sequences messages into batches for one topic without any ordering. + """Sequences messages into batches for one topic without any ordering. - Public methods are NOT thread-safe. + Public methods are NOT thread-safe. """ - def __init__(self, client, topic): + def __init__(self, client: "PublisherClient", topic: str): self._client = client self._topic = topic - self._current_batch = None + self._current_batch: Optional["_batch.thread.Batch"] = None self._stopped = False - def is_finished(self): - """ Whether the sequencer is finished and should be cleaned up. + def is_finished(self) -> bool: + """Whether the sequencer is finished and should be cleaned up. - Returns: - bool: Whether the sequencer is finished and should be cleaned up. + Returns: + Whether the sequencer is finished and should be cleaned up. """ # TODO: Implement. Not implementing yet because of possible performance # impact due to extra locking required. This does mean that @@ -39,26 +55,26 @@ def is_finished(self): # previously existing behavior. return False - def stop(self): - """ Stop the sequencer. + def stop(self) -> None: + """Stop the sequencer. - Subsequent publishes will fail. + Subsequent publishes will fail. - Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ if self._stopped: raise RuntimeError("Unordered sequencer already stopped.") self.commit() self._stopped = True - def commit(self): - """ Commit the batch. + def commit(self) -> None: + """Commit the batch. - Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ if self._stopped: raise RuntimeError("Unordered sequencer already stopped.") @@ -71,13 +87,23 @@ def commit(self): # batch. self._current_batch = None - def unpause(self): - """ Not relevant for this class. """ + def unpause(self) -> typing.NoReturn: + """Not relevant for this class.""" raise NotImplementedError - def _create_batch(self): - """ Create a new batch using the client's batch class and other stored + def _create_batch( + self, + commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, + ) -> "_batch.thread.Batch": + """Create a new batch using the client's batch class and other stored settings. + + Args: + commit_retry: + The retry settings to apply when publishing the batch. + commit_timeout: + The timeout to apply when publishing the batch. """ return self._client._batch_class( client=self._client, @@ -85,18 +111,29 @@ def _create_batch(self): settings=self._client.batch_settings, batch_done_callback=None, commit_when_full=True, + commit_retry=commit_retry, + commit_timeout=commit_timeout, ) - def publish(self, message): - """ Batch message into existing or new batch. 
+ def publish( + self, + wrapper: PublishMessageWrapper, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, + ) -> "futures.Future": + """Batch message into existing or new batch. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + wrapper: + The Pub/Sub message wrapper. + retry: + The retry settings to apply when publishing the message. + timeout: + The timeout to apply when publishing the message. Returns: - ~google.api_core.future.Future: An object conforming to - the :class:`~concurrent.futures.Future` interface. The future tracks - the publishing status of the message. + An object conforming to the :class:`~concurrent.futures.Future` interface. + The future tracks the publishing status of the message. Raises: RuntimeError: @@ -109,22 +146,22 @@ def publish(self, message): raise RuntimeError("Unordered sequencer already stopped.") if not self._current_batch: - newbatch = self._create_batch() + newbatch = self._create_batch(commit_retry=retry, commit_timeout=timeout) self._current_batch = newbatch batch = self._current_batch future = None while future is None: # Might throw MessageTooLargeError - future = batch.publish(message) + future = batch.publish(wrapper) # batch is full, triggering commit_when_full if future is None: - batch = self._create_batch() + batch = self._create_batch(commit_retry=retry, commit_timeout=timeout) # At this point, we lose track of the old batch, but we don't # care since it's already committed (because it was full.) self._current_batch = batch return future # Used only for testing. - def _set_batch(self, batch): + def _set_batch(self, batch: "_batch.thread.Batch") -> None: self._current_batch = batch diff --git a/google/cloud/pubsub_v1/publisher/client.py b/google/cloud/pubsub_v1/publisher/client.py index 8dbbea634..0740e3185 100644 --- a/google/cloud/pubsub_v1/publisher/client.py +++ b/google/cloud/pubsub_v1/publisher/client.py @@ -17,48 +17,51 @@ import copy import logging import os -import pkg_resources import threading import time +import typing +from typing import Any, Dict, Optional, Sequence, Tuple, Type, Union +import warnings +import sys -import grpc -import six +from google.api_core import gapic_v1 +from google.auth.credentials import AnonymousCredentials # type: ignore +from google.oauth2 import service_account # type: ignore -from google.api_core import grpc_helpers -from google.oauth2 import service_account - -from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import publisher_client -from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer from google.cloud.pubsub_v1.publisher.flow_controller import FlowController +from google.pubsub_v1 import gapic_version as package_version +from google.pubsub_v1 import types as gapic_types +from google.pubsub_v1.services.publisher import client as publisher_client +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + +__version__ = package_version.__version__ + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud import pubsub_v1 + from 
google.cloud.pubsub_v1.publisher import _batch + from google.pubsub_v1.services.publisher.client import OptionalRetry + from google.pubsub_v1.types import pubsub as pubsub_types -__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version _LOGGER = logging.getLogger(__name__) -_BLACKLISTED_METHODS = ( - "publish", - "from_service_account_file", - "from_service_account_json", -) +_raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() -def _set_nested_value(container, value, keys): - current = container - for key in keys[:-1]: - current = current.setdefault(key, {}) - current[keys[-1]] = value - return container +SequencerType = Union[ + ordered_sequencer.OrderedSequencer, unordered_sequencer.UnorderedSequencer +] -@_gapic.add_methods(publisher_client.PublisherClient, blacklist=_BLACKLISTED_METHODS) -class Client(object): +class Client(publisher_client.PublisherClient): """A publisher client for Google Cloud Pub/Sub. This creates an object that is capable of publishing messages. @@ -66,20 +69,18 @@ class Client(object): get sensible defaults. Args: - batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The - settings for batch publishing. - publisher_options (~google.cloud.pubsub_v1.types.PublisherOptions): The - options for the publisher client. Note that enabling message ordering will - override the publish retry timeout to be infinite. - kwargs (dict): Any additional arguments provided are sent as keyword - arguments to the underlying + batch_settings: + The settings for batch publishing. + publisher_options: + The options for the publisher client. Note that enabling message ordering + will override the publish retry timeout to be infinite. + kwargs: + Any additional arguments provided are sent as keyword arguments to the + underlying :class:`~google.cloud.pubsub_v1.gapic.publisher_client.PublisherClient`. Generally you should not need to set additional keyword - arguments. Optionally, publish retry settings can be set via - ``client_config`` where user-provided retry configurations are - applied to default retry settings. And regional endpoints can be - set via ``client_options`` that takes a single key-value pair that - defines the endpoint. + arguments. Regional endpoints can be set via ``client_options`` that + takes a single key-value pair that defines the endpoint. Example: @@ -103,19 +104,6 @@ class Client(object): ), ), - # Optional - client_config = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_params": { - "messaging": { - 'total_timeout_millis': 650000, # default: 600000 - } - } - } - } - }, - # Optional client_options = { "api_endpoint": REGIONAL_ENDPOINT @@ -123,76 +111,41 @@ class Client(object): ) """ - def __init__(self, batch_settings=(), publisher_options=(), **kwargs): + def __init__( + self, + batch_settings: Union[types.BatchSettings, Sequence] = (), + publisher_options: Union[types.PublisherOptions, Sequence] = (), + **kwargs: Any, + ): assert ( type(batch_settings) is types.BatchSettings or len(batch_settings) == 0 - ), "batch_settings must be of type BatchSettings or an empty tuple." + ), "batch_settings must be of type BatchSettings or an empty sequence." assert ( type(publisher_options) is types.PublisherOptions or len(publisher_options) == 0 - ), "publisher_options must be of type PublisherOptions or an empty tuple." + ), "publisher_options must be of type PublisherOptions or an empty sequence." # Sanity check: Is our goal to use the emulator? 
# If so, create a grpc insecure channel with the emulator host # as the target. + # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator + # code below to test files. if os.environ.get("PUBSUB_EMULATOR_HOST"): - kwargs["channel"] = grpc.insecure_channel( - target=os.environ.get("PUBSUB_EMULATOR_HOST") - ) - - client_options = kwargs.pop("client_options", None) - if ( - client_options - and "api_endpoint" in client_options - and isinstance(client_options["api_endpoint"], six.string_types) - ): - self._target = client_options["api_endpoint"] - else: - self._target = publisher_client.PublisherClient.SERVICE_ADDRESS - - # Use a custom channel. - # We need this in order to set appropriate default message size and - # keepalive options. - if "transport" not in kwargs: - channel = kwargs.pop("channel", None) - if channel is None: - channel = grpc_helpers.create_channel( - credentials=kwargs.pop("credentials", None), - target=self.target, - scopes=publisher_client.PublisherClient._DEFAULT_SCOPES, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - # cannot pass both 'channel' and 'credentials' - kwargs.pop("credentials", None) - transport = publisher_grpc_transport.PublisherGrpcTransport(channel=channel) - kwargs["transport"] = transport + kwargs["client_options"] = { + "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST") + } + # Configure credentials directly to transport, if provided. + if "transport" not in kwargs: + kwargs["credentials"] = AnonymousCredentials() # For a transient failure, retry publishing the message infinitely. self.publisher_options = types.PublisherOptions(*publisher_options) self._enable_message_ordering = self.publisher_options[0] - if self._enable_message_ordering: - # Set retry timeout to "infinite" when message ordering is enabled. - # Note that this then also impacts messages added with an empty ordering - # key. - client_config = _set_nested_value( - kwargs.pop("client_config", {}), - 2 ** 32, - [ - "interfaces", - "google.pubsub.v1.Publisher", - "retry_params", - "messaging", - "total_timeout_millis", - ], - ) - kwargs["client_config"] = client_config # Add the metrics headers, and instantiate the underlying GAPIC # client. - self.api = publisher_client.PublisherClient(**kwargs) + super().__init__(**kwargs) + self._target = self._transport._host self._batch_class = thread.Batch self.batch_settings = types.BatchSettings(*batch_settings) @@ -200,48 +153,93 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): # messages. One batch exists for each topic. self._batch_lock = self._batch_class.make_lock() # (topic, ordering_key) => sequencers object - self._sequencers = {} + self._sequencers: Dict[Tuple[str, str], SequencerType] = {} self._is_stopped = False # Thread created to commit all sequencers after a timeout. - self._commit_thread = None + self._commit_thread: Optional[threading.Thread] = None # The object controlling the message publishing flow self._flow_controller = FlowController(self.publisher_options.flow_control) + self._open_telemetry_enabled = ( + self.publisher_options.enable_open_telemetry_tracing + ) + # OpenTelemetry features used by the library are not supported in Python versions <= 3.7. 
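On supported Python versions, the tracing path guarded by ``_open_telemetry_enabled`` in this constructor is switched on through ``publisher_options``. A small sketch; the console exporter is illustrative only and assumes the ``opentelemetry-sdk`` package is installed:

    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

    from google.cloud import pubsub_v1

    # Any configured tracer provider works; the console exporter is just a demo sink.
    provider = TracerProvider()
    provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
    trace.set_tracer_provider(provider)

    publisher = pubsub_v1.PublisherClient(
        publisher_options=pubsub_v1.types.PublisherOptions(
            enable_open_telemetry_tracing=True
        )
    )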
+ # Refer https://github.com/open-telemetry/opentelemetry-python/issues/3993#issuecomment-2211976389 + if ( + self.publisher_options.enable_open_telemetry_tracing + and sys.version_info.major == 3 + and sys.version_info.minor < 8 + ): + warnings.warn( + message="Open Telemetry for Python version 3.7 or lower is not supported. Disabling Open Telemetry tracing.", + category=RuntimeWarning, + ) + self._open_telemetry_enabled = False + @classmethod - def from_service_account_file(cls, filename, batch_settings=(), **kwargs): + def from_service_account_file( # type: ignore[override] + cls, + filename: str, + batch_settings: Union[types.BatchSettings, Sequence] = (), + **kwargs: Any, + ) -> "Client": """Creates an instance of this client using the provided credentials file. Args: - filename (str): The path to the service account private key json - file. - batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The - settings for batch publishing. - kwargs: Additional arguments to pass to the constructor. + filename: + The path to the service account private key JSON file. + batch_settings: + The settings for batch publishing. + kwargs: + Additional arguments to pass to the constructor. Returns: - A Publisher :class:`~google.cloud.pubsub_v1.publisher.client.Client` - instance that is the constructed client. + A Publisher instance that is the constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials return cls(batch_settings, **kwargs) - from_service_account_json = from_service_account_file + from_service_account_json = from_service_account_file # type: ignore[assignment] @property - def target(self): + def target(self) -> str: """Return the target (where the API is). Returns: - str: The location of the API. + The location of the API. """ return self._target - def _get_or_create_sequencer(self, topic, ordering_key): - """ Get an existing sequencer or create a new one given the (topic, - ordering_key) pair. + @property + def api(self): + """The underlying gapic API client. + + .. versionchanged:: 2.10.0 + Instead of a GAPIC ``PublisherClient`` client instance, this property is a + proxy object to it with the same interface. + + .. deprecated:: 2.10.0 + Use the GAPIC methods and properties on the client instance directly + instead of through the :attr:`api` attribute. + """ + msg = ( + 'The "api" property only exists for backward compatibility, access its ' + 'attributes directly through the client instance (e.g. "client.foo" ' + 'instead of "client.api.foo").' + ) + warnings.warn(msg, category=DeprecationWarning) + return super() + + @property + def open_telemetry_enabled(self) -> bool: + return self._open_telemetry_enabled + + def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> SequencerType: + """Get an existing sequencer or create a new one given the (topic, + ordering_key) pair. """ sequencer_key = (topic, ordering_key) sequencer = self._sequencers.get(sequencer_key) @@ -256,11 +254,11 @@ def _get_or_create_sequencer(self, topic, ordering_key): return sequencer - def resume_publish(self, topic, ordering_key): - """ Resume publish on an ordering key that has had unrecoverable errors. + def resume_publish(self, topic: str, ordering_key: str) -> None: + """Resume publish on an ordering key that has had unrecoverable errors. Args: - topic (str): The topic to publish messages to. + topic: The topic to publish messages to. 
ordering_key: A string that identifies related messages for which publish order should be respected. @@ -292,7 +290,19 @@ def resume_publish(self, topic, ordering_key): else: sequencer.unpause() - def publish(self, topic, data, ordering_key="", **attrs): + def _gapic_publish(self, *args, **kwargs) -> "pubsub_types.PublishResponse": + """Call the GAPIC public API directly.""" + return super().publish(*args, **kwargs) + + def publish( # type: ignore[override] + self, + topic: str, + data: bytes, + ordering_key: str = "", + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, + **attrs: Union[bytes, str], + ) -> "pubsub_v1.publisher.futures.Future": """Publish a single message. .. note:: @@ -310,6 +320,8 @@ def publish(self, topic, data, ordering_key="", **attrs): Add the given message to this object; this will cause it to be published once the batch either has enough messages or a sufficient period of time has elapsed. + This method may block if LimitExceededBehavior.BLOCK is used in the + flow control settings. Example: >>> from google.cloud import pubsub_v1 >>> client = pubsub_v1.PublisherClient() @@ -319,15 +331,22 @@ def publish(self, topic, data, ordering_key="", **attrs): >>> response = client.publish(topic, data, username='guido') Args: - topic (str): The topic to publish messages to. - data (bytes): A bytestring representing the message body. This + topic: The topic to publish messages to. + data: A bytestring representing the message body. This must be a bytestring. ordering_key: A string that identifies related messages for which publish order should be respected. Message ordering must be enabled for this client to use this feature. - EXPERIMENTAL: This feature is currently available in a closed - alpha. Please contact the Cloud Pub/Sub team to use it. - attrs (Mapping[str, str]): A dictionary of attributes to be + retry: + Designation of what errors, if any, should be retried. If `ordering_key` + is specified, the total retry deadline will be changed to "infinity". + If given, it overrides any retry passed into the client through + the ``publisher_options`` argument. + timeout: + The timeout for the RPC request. Can be used to override any timeout + passed in through ``publisher_options`` when instantiating the client. + + attrs: A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) Returns: @@ -346,7 +365,7 @@ def publish(self, topic, data, ordering_key="", **attrs): """ # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. - if not isinstance(data, six.binary_type): + if not isinstance(data, bytes): raise TypeError( "Data being published to Pub/Sub must be sent as a bytestring." ) @@ -359,9 +378,9 @@ def publish(self, topic, data, ordering_key="", **attrs): # Coerce all attributes to text strings. for k, v in copy.copy(attrs).items(): - if isinstance(v, six.text_type): + if isinstance(v, str): continue - if isinstance(v, six.binary_type): + if isinstance(v, bytes): attrs[k] = v.decode("utf-8") continue raise TypeError( @@ -369,16 +388,49 @@ def publish(self, topic, data, ordering_key="", **attrs): "be sent as text strings." ) - # Create the Pub/Sub message object. - message = types.PubsubMessage( + # Create the Pub/Sub message object. For performance reasons, the message + # should be constructed by directly using the raw protobuf class, and only + # then wrapping it into the higher-level PubsubMessage class. 
+ vanilla_pb = _raw_proto_pubbsub_message( data=data, ordering_key=ordering_key, attributes=attrs ) + message = gapic_types.PubsubMessage.wrap(vanilla_pb) + + wrapper: PublishMessageWrapper = PublishMessageWrapper(message) + if self._open_telemetry_enabled: + wrapper.start_create_span(topic=topic, ordering_key=ordering_key) # Messages should go through flow control to prevent excessive # queuing on the client side (depending on the settings). try: + if self._open_telemetry_enabled: + if wrapper: + wrapper.start_publisher_flow_control_span() + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Not starting publisher flow control span.", + category=RuntimeWarning, + ) self._flow_controller.add(message) + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_flow_control_span() + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Not ending publisher flow control span.", + category=RuntimeWarning, + ) except exceptions.FlowControlLimitError as exc: + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_flow_control_span(exc) + wrapper.end_create_span(exc) + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Not ending publisher create and flow control spans on FlowControlLimitError.", + category=RuntimeWarning, + ) + future = futures.Future() future.set_exception(exc) return future @@ -386,15 +438,74 @@ def publish(self, topic, data, ordering_key="", **attrs): def on_publish_done(future): self._flow_controller.release(message) - with self._batch_lock: - if self._is_stopped: - raise RuntimeError("Cannot publish on a stopped publisher.") + if retry is gapic_v1.method.DEFAULT: # if custom retry not passed in + retry = self.publisher_options.retry - sequencer = self._get_or_create_sequencer(topic, ordering_key) + if timeout is gapic_v1.method.DEFAULT: # if custom timeout not passed in + timeout = self.publisher_options.timeout - # Delegate the publishing to the sequencer. - future = sequencer.publish(message) - future.add_done_callback(on_publish_done) + if self._open_telemetry_enabled: + if wrapper: + wrapper.start_publisher_batching_span() + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Hence, not starting publisher batching span", + category=RuntimeWarning, + ) + with self._batch_lock: + try: + if self._is_stopped: + raise RuntimeError("Cannot publish on a stopped publisher.") + + # Set retry timeout to "infinite" when message ordering is enabled. + # Note that this then also impacts messages added with an empty + # ordering key. + if self._enable_message_ordering: + if retry is gapic_v1.method.DEFAULT: + # use the default retry for the publish GRPC method as a base + transport = self._transport + base_retry = transport._wrapped_methods[ + transport.publish + ]._retry + retry = base_retry.with_deadline(2.0**32) + # timeout needs to be overridden and set to infinite in + # addition to the retry deadline since both determine + # the duration for which retries are attempted. + timeout = 2.0**32 + elif retry is not None: + retry = retry.with_deadline(2.0**32) + timeout = 2.0**32 + + # Delegate the publishing to the sequencer. + sequencer = self._get_or_create_sequencer(topic, ordering_key) + future = sequencer.publish( + wrapper=wrapper, retry=retry, timeout=timeout + ) + future.add_done_callback(on_publish_done) + except BaseException as be: + # Exceptions can be thrown when attempting to add messages to + # the batch. 
If they're thrown, record them in the publisher + batching and create spans, end the spans, and bubble the + exception up. + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_batching_span(be) + wrapper.end_create_span(be) + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Hence, not recording exception and ending publisher batching span and create span", + category=RuntimeWarning, + ) + raise be + + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_batching_span() + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Hence, not ending publisher batching span", + category=RuntimeWarning, + ) # Create a timer thread if necessary to enforce the batching # timeout. @@ -402,33 +513,37 @@ def on_publish_done(future): return future - def ensure_cleanup_and_commit_timer_runs(self): - """ Ensure a cleanup/commit timer thread is running. + def ensure_cleanup_and_commit_timer_runs(self) -> None: + """Ensure a cleanup/commit timer thread is running. - If a cleanup/commit timer thread is already running, this does nothing. + If a cleanup/commit timer thread is already running, this does nothing. """ with self._batch_lock: self._ensure_commit_timer_runs_no_lock() - def _ensure_commit_timer_runs_no_lock(self): - """ Ensure a commit timer thread is running, without taking - _batch_lock. + def _ensure_commit_timer_runs_no_lock(self) -> None: + """Ensure a commit timer thread is running, without taking + _batch_lock. - _batch_lock must be held before calling this method. + _batch_lock must be held before calling this method. """ if not self._commit_thread and self.batch_settings.max_latency < float("inf"): self._start_commit_thread() - def _start_commit_thread(self): + def _start_commit_thread(self) -> None: """Start a new thread to actually wait and commit the sequencers.""" + # NOTE: If the thread is *not* a daemon, a memory leak exists due to a CPython issue. + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-830092418 self._commit_thread = threading.Thread( - name="Thread-PubSubBatchCommitter", target=self._wait_and_commit_sequencers + name="Thread-PubSubBatchCommitter", + target=self._wait_and_commit_sequencers, + daemon=True, ) self._commit_thread.start() - def _wait_and_commit_sequencers(self): - """ Wait up to the batching timeout, and commit all sequencers. - """ + def _wait_and_commit_sequencers(self) -> None: + """Wait up to the batching timeout, and commit all sequencers.""" # Sleep for however long we should be waiting. time.sleep(self.batch_settings.max_latency) _LOGGER.debug("Commit thread is waking up") @@ -439,8 +554,8 @@ def _wait_and_commit_sequencers(self): self._commit_sequencers() self._commit_thread = None - def _commit_sequencers(self): - """ Clean up finished sequencers and commit the rest. """ + def _commit_sequencers(self) -> None: + """Clean up finished sequencers and commit the rest.""" finished_sequencer_keys = [ key for key, sequencer in self._sequencers.items() @@ -452,7 +567,7 @@ def _commit_sequencers(self): for sequencer in self._sequencers.values(): sequencer.commit() - def stop(self): + def stop(self) -> None: """Immediately publish all outstanding messages. Asynchronously sends all outstanding messages and @@ -481,15 +596,19 @@ def stop(self): sequencer.stop() # Used only for testing. 
- def _set_batch(self, topic, batch, ordering_key=""): + def _set_batch( + self, topic: str, batch: "_batch.thread.Batch", ordering_key: str = "" + ) -> None: sequencer = self._get_or_create_sequencer(topic, ordering_key) sequencer._set_batch(batch) # Used only for testing. - def _set_batch_class(self, batch_class): + def _set_batch_class(self, batch_class: Type) -> None: self._batch_class = batch_class # Used only for testing. - def _set_sequencer(self, topic, sequencer, ordering_key=""): + def _set_sequencer( + self, topic: str, sequencer: SequencerType, ordering_key: str = "" + ) -> None: sequencer_key = (topic, ordering_key) self._sequencers[sequencer_key] = sequencer diff --git a/google/cloud/pubsub_v1/publisher/exceptions.py b/google/cloud/pubsub_v1/publisher/exceptions.py index 89b3790a0..f2b65299e 100644 --- a/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/google/cloud/pubsub_v1/publisher/exceptions.py @@ -27,13 +27,13 @@ class MessageTooLargeError(ValueError): class PublishToPausedOrderingKeyException(Exception): - """ Publish attempted to paused ordering key. To resume publishing, call - the resumePublish method on the publisher Client object with this - ordering key. Ordering keys are paused if an unrecoverable error - occurred during publish of a batch for that key. + """Publish attempted to a paused ordering key. To resume publishing, call + the resume_publish method on the publisher Client object with this + ordering key. Ordering keys are paused if an unrecoverable error + occurred during publish of a batch for that key. """ - def __init__(self, ordering_key): + def __init__(self, ordering_key: str): self.ordering_key = ordering_key super(PublishToPausedOrderingKeyException, self).__init__() diff --git a/google/cloud/pubsub_v1/publisher/flow_controller.py b/google/cloud/pubsub_v1/publisher/flow_controller.py index c10fadcef..baf6ba8ff 100644 --- a/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import deque +from collections import OrderedDict import logging import threading +from typing import Dict, Optional, Type import warnings from google.cloud.pubsub_v1 import types @@ -24,23 +25,34 @@ _LOGGER = logging.getLogger(__name__) -class _QuantityReservation(object): - """A (partial) reservation of a quantifiable resource.""" +MessageType = Type[types.PubsubMessage] # type: ignore - def __init__(self, reserved, needed): - self.reserved = reserved - self.needed = needed + +class _QuantityReservation: + """A (partial) reservation of quantifiable resources.""" + + def __init__(self, bytes_reserved: int, bytes_needed: int, has_slot: bool): + self.bytes_reserved = bytes_reserved + self.bytes_needed = bytes_needed + self.has_slot = has_slot + + def __repr__(self): + return ( + f"{type(self).__name__}(" + f"bytes_reserved={self.bytes_reserved}, " + f"bytes_needed={self.bytes_needed}, " + f"has_slot={self.has_slot})" + ) class FlowController(object): """A class used to control the flow of messages passing through it. Args: - settings (~google.cloud.pubsub_v1.types.PublishFlowControl): - Desired flow control configuration. + settings: Desired flow control configuration. """ - def __init__(self, settings): + def __init__(self, settings: types.PublishFlowControl): self._settings = settings # Load statistics. 
They represent the number of messages added, but not @@ -48,14 +60,13 @@ def __init__(self, settings): self._message_count = 0 self._total_bytes = 0 - # A FIFO queue of threads blocked on adding a message, from first to last. + # A FIFO queue of threads blocked on adding a message that also tracks their + # reservations of available flow control bytes and message slots. # Only relevant if the configured limit exceeded behavior is BLOCK. - self._waiting = deque() + self._waiting: Dict[threading.Thread, _QuantityReservation] = OrderedDict() - # Reservations of available flow control bytes by the waiting threads. - # Each value is a _QuantityReservation instance. - self._byte_reservations = dict() self._reserved_bytes = 0 + self._reserved_slots = 0 # The lock is used to protect all internal state (message and byte count, # waiting threads to add, etc.). @@ -64,14 +75,14 @@ def __init__(self, settings): # The condition for blocking the flow if capacity is exceeded. self._has_capacity = threading.Condition(lock=self._operational_lock) - def add(self, message): + def add(self, message: MessageType) -> None: """Add a message to flow control. Adding a message updates the internal load statistics, and an action is taken if these limits are exceeded (depending on the flow control settings). Args: - message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): + message: The message entering the flow control. Raises: @@ -88,7 +99,7 @@ def add(self, message): with self._operational_lock: if not self._would_overflow(message): self._message_count += 1 - self._total_bytes += message.ByteSize() + self._total_bytes += message._pb.ByteSize() return # Adding a message would overflow, react. @@ -101,7 +112,7 @@ def add(self, message): # load if we accepted the message. load_info = self._load_info( message_count=self._message_count + 1, - total_bytes=self._total_bytes + message.ByteSize(), + total_bytes=self._total_bytes + message._pb.ByteSize(), ) error_msg = "Flow control limits would be exceeded - {}.".format( load_info @@ -116,11 +127,11 @@ def add(self, message): # Sanity check - if a message exceeds total flow control limits all # by itself, it would block forever, thus raise error. if ( - message.ByteSize() > self._settings.byte_limit + message._pb.ByteSize() > self._settings.byte_limit or self._settings.message_limit < 1 ): load_info = self._load_info( - message_count=1, total_bytes=message.ByteSize() + message_count=1, total_bytes=message._pb.ByteSize() ) error_msg = ( "Total flow control limits too low for the message, " @@ -131,11 +142,13 @@ def add(self, message): current_thread = threading.current_thread() while self._would_overflow(message): - if current_thread not in self._byte_reservations: - self._waiting.append(current_thread) - self._byte_reservations[current_thread] = _QuantityReservation( - reserved=0, needed=message.ByteSize() + if current_thread not in self._waiting: + reservation = _QuantityReservation( + bytes_reserved=0, + bytes_needed=message._pb.ByteSize(), + has_slot=False, ) + self._waiting[current_thread] = reservation # Will be placed last. _LOGGER.debug( "Blocking until there is enough free capacity in the flow - " @@ -151,16 +164,16 @@ def add(self, message): # Message accepted, increase the load and remove thread stats. 
self._message_count += 1 - self._total_bytes += message.ByteSize() - self._reserved_bytes -= self._byte_reservations[current_thread].reserved - del self._byte_reservations[current_thread] - self._waiting.remove(current_thread) + self._total_bytes += message._pb.ByteSize() + self._reserved_bytes -= self._waiting[current_thread].bytes_reserved + self._reserved_slots -= 1 + del self._waiting[current_thread] - def release(self, message): + def release(self, message: MessageType) -> None: """Release a message from flow control. Args: - message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): + message: The message entering the flow control. """ if self._settings.limit_exceeded_behavior == types.LimitExceededBehavior.IGNORE: @@ -169,7 +182,7 @@ def release(self, message): with self._operational_lock: # Releasing a message decreases the load. self._message_count -= 1 - self._total_bytes -= message.ByteSize() + self._total_bytes -= message._pb.ByteSize() if self._message_count < 0 or self._total_bytes < 0: warnings.warn( @@ -180,85 +193,99 @@ def release(self, message): self._message_count = max(0, self._message_count) self._total_bytes = max(0, self._total_bytes) - self._distribute_available_bytes() + self._distribute_available_capacity() # If at least one thread waiting to add() can be unblocked, wake them up. if self._ready_to_unblock(): _LOGGER.debug("Notifying threads waiting to add messages to flow.") self._has_capacity.notify_all() - def _distribute_available_bytes(self): - """Distribute availalbe free capacity among the waiting threads in FIFO order. + def _distribute_available_capacity(self) -> None: + """Distribute available capacity among the waiting threads in FIFO order. The method assumes that the caller has obtained ``_operational_lock``. """ - available = self._settings.byte_limit - self._total_bytes - self._reserved_bytes + available_slots = ( + self._settings.message_limit - self._message_count - self._reserved_slots + ) + available_bytes = ( + self._settings.byte_limit - self._total_bytes - self._reserved_bytes + ) + + for reservation in self._waiting.values(): + if available_slots <= 0 and available_bytes <= 0: + break # Santa is now empty-handed, better luck next time. + + # Distribute any free slots. + if available_slots > 0 and not reservation.has_slot: + reservation.has_slot = True + self._reserved_slots += 1 + available_slots -= 1 - for thread in self._waiting: - if available <= 0: - break + # Distribute any free bytes. + if available_bytes <= 0: + continue - reservation = self._byte_reservations[thread] - still_needed = reservation.needed - reservation.reserved + bytes_still_needed = reservation.bytes_needed - reservation.bytes_reserved - # Sanity check for any internal inconsistencies. - if still_needed < 0: + if bytes_still_needed < 0: # Sanity check for any internal inconsistencies. msg = "Too many bytes reserved: {} / {}".format( - reservation.reserved, reservation.needed + reservation.bytes_reserved, reservation.bytes_needed ) warnings.warn(msg, category=RuntimeWarning) - still_needed = 0 + bytes_still_needed = 0 - can_give = min(still_needed, available) - reservation.reserved += can_give + can_give = min(bytes_still_needed, available_bytes) + reservation.bytes_reserved += can_give self._reserved_bytes += can_give - available -= can_give + available_bytes -= can_give - def _ready_to_unblock(self): + def _ready_to_unblock(self) -> bool: """Determine if any of the threads waiting to add a message can proceed. 
The method assumes that the caller has obtained ``_operational_lock``. - - Returns: - bool """ if self._waiting: # It's enough to only check the head of the queue, because FIFO # distribution of any free capacity. - reservation = self._byte_reservations[self._waiting[0]] + first_reservation = next(iter(self._waiting.values())) return ( - reservation.reserved >= reservation.needed - and self._message_count < self._settings.message_limit + first_reservation.bytes_reserved >= first_reservation.bytes_needed + and first_reservation.has_slot ) return False - def _would_overflow(self, message): + def _would_overflow(self, message: MessageType) -> bool: """Determine if accepting a message would exceed flow control limits. The method assumes that the caller has obtained ``_operational_lock``. Args: - message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): - The message entering the flow control. - - Returns: - bool + message: The message entering the flow control. """ - reservation = self._byte_reservations.get(threading.current_thread()) + reservation = self._waiting.get(threading.current_thread()) if reservation: - enough_reserved = reservation.reserved >= reservation.needed + enough_reserved = reservation.bytes_reserved >= reservation.bytes_needed + has_slot = reservation.has_slot else: enough_reserved = False + has_slot = False - bytes_taken = self._total_bytes + self._reserved_bytes + message.ByteSize() + bytes_taken = self._total_bytes + self._reserved_bytes + message._pb.ByteSize() size_overflow = bytes_taken > self._settings.byte_limit and not enough_reserved - msg_count_overflow = self._message_count + 1 > self._settings.message_limit + + msg_count_overflow = not has_slot and ( + (self._message_count + self._reserved_slots + 1) + > self._settings.message_limit + ) return size_overflow or msg_count_overflow - def _load_info(self, message_count=None, total_bytes=None, reserved_bytes=None): + def _load_info( + self, message_count: Optional[int] = None, total_bytes: Optional[int] = None + ) -> str: """Return the current flow control load information. The caller can optionally adjust some of the values to fit its reporting @@ -267,31 +294,20 @@ def _load_info(self, message_count=None, total_bytes=None, reserved_bytes=None): The method assumes that the caller has obtained ``_operational_lock``. Args: - message_count (Optional[int]): + message_count: The value to override the current message count with. - total_bytes (Optional[int]): + total_bytes: The value to override the current total bytes with. - reserved_bytes (Optional[int]): - The value to override the current number of reserved bytes with. 
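# Hedged toy restatement of the overflow predicate above (assumed parameter
# names): a thread whose reservation is fully funded and which holds a slot
# is let through even when global usage sits at the limits.
def would_overflow(total_bytes: int, reserved_bytes: int, msg_bytes: int,
                   byte_limit: int, msg_count: int, reserved_slots: int,
                   msg_limit: int, enough_reserved: bool = False,
                   has_slot: bool = False) -> bool:
    size_overflow = (total_bytes + reserved_bytes + msg_bytes
                     > byte_limit) and not enough_reserved
    count_overflow = not has_slot and (
        msg_count + reserved_slots + 1 > msg_limit)
    return size_overflow or count_overflow

assert would_overflow(90, 0, 20, 100, 0, 0, 10)     # byte limit exceeded
assert not would_overflow(90, 20, 20, 100, 9, 1, 10,
                          enough_reserved=True, has_slot=True)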
- - Returns: - str """ - msg = "messages: {} / {}, bytes: {} / {} (reserved: {})" - if message_count is None: message_count = self._message_count if total_bytes is None: total_bytes = self._total_bytes - if reserved_bytes is None: - reserved_bytes = self._reserved_bytes - - return msg.format( - message_count, - self._settings.message_limit, - total_bytes, - self._settings.byte_limit, - reserved_bytes, + return ( + f"messages: {message_count} / {self._settings.message_limit} " + f"(reserved: {self._reserved_slots}), " + f"bytes: {total_bytes} / {self._settings.byte_limit} " + f"(reserved: {self._reserved_bytes})" ) diff --git a/google/cloud/pubsub_v1/publisher/futures.py b/google/cloud/pubsub_v1/publisher/futures.py index fa8a79998..7b5921673 100644 --- a/google/cloud/pubsub_v1/publisher/futures.py +++ b/google/cloud/pubsub_v1/publisher/futures.py @@ -14,8 +14,14 @@ from __future__ import absolute_import +import typing +from typing import Any, Callable, Union + from google.cloud.pubsub_v1 import futures +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud import pubsub_v1 + class Future(futures.Future): """This future object is returned from asychronous Pub/Sub publishing @@ -25,28 +31,53 @@ class Future(futures.Future): ID, unless an error occurs. """ - def result(self, timeout=None): + def cancel(self) -> bool: + """Actions in Pub/Sub generally may not be canceled. + + This method always returns ``False``. + """ + return False + + def cancelled(self) -> bool: + """Actions in Pub/Sub generally may not be canceled. + + This method always returns ``False``. + """ + return False + + def result(self, timeout: Union[int, float, None] = None) -> str: """Return the message ID or raise an exception. This blocks until the message has been published successfully and returns the message ID unless an exception is raised. Args: - timeout (Union[int, float]): The number of seconds before this call + timeout: The number of seconds before this call times out and raises TimeoutError. Returns: - str: The message ID. + The message ID. Raises: concurrent.futures.TimeoutError: If the request times out. Exception: For undefined exceptions in the underlying call execution. """ - # Attempt to get the exception if there is one. - # If there is not one, then we know everything worked, and we can - # return an appropriate value. - err = self.exception(timeout=timeout) - if err is None: - return self._result - raise err + return super().result(timeout=timeout) + + # This exists to make the type checkers happy. + def add_done_callback( + self, callback: Callable[["pubsub_v1.publisher.futures.Future"], Any] + ) -> None: + """Attach a callable that will be called when the future finishes. + + Args: + callback: + A callable that will be called with this future as its only + argument when the future completes or is cancelled. The callable + will always be called by a thread in the same process in which + it was added. If the future has already completed or been + cancelled then the callable will be called immediately. These + callables are called in the order that they were added. 
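# Usage sketch for the publish-future semantics above (project/topic IDs are
# hypothetical; running this needs real credentials): result() blocks until
# the server returns a message ID, and cancel() is always a no-op.
from concurrent import futures as std_futures
from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path("my-project", "my-topic")

future = publisher.publish(topic_path, b"payload", origin="example")
assert future.cancel() is False          # publishes cannot be canceled
try:
    message_id = future.result(timeout=60)
    print("published as", message_id)
except std_futures.TimeoutError:
    print("publish timed out")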
+ """ + return super().add_done_callback(callback) # type: ignore diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index dd324fe21..fe3771432 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -15,15 +15,43 @@ from __future__ import absolute_import from __future__ import division -import collections +import functools import itertools import logging import math +import time import threading +import typing +from typing import List, Optional, Sequence, Union +import warnings +from google.api_core.retry import exponential_sleep_generator + +from opentelemetry import trace -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeStatus, +) +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + start_ack_span, + start_nack_span, +) + +if typing.TYPE_CHECKING: # pragma: NO COVER + import queue + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) + + +RequestItem = Union[ + requests.AckRequest, + requests.DropRequest, + requests.LeaseRequest, + requests.ModAckRequest, + requests.NackRequest, +] _LOGGER = logging.getLogger(__name__) @@ -37,28 +65,29 @@ """The maximum amount of time in seconds to wait for additional request items before processing the next batch of requests.""" -_ACK_IDS_BATCH_SIZE = 2500 +_ACK_IDS_BATCH_SIZE = 1000 """The maximum number of ACK IDs to send in a single StreamingPullRequest. +""" -The backend imposes a maximum request size limit of 524288 bytes (512 KiB) per -acknowledge / modifyAckDeadline request. ACK IDs have a maximum size of 164 -bytes, thus we cannot send more than o 524288/176 ~= 2979 ACK IDs in a single -StreamingPullRequest message. +_MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS = 1 +"""The time to wait for the first retry of failed acks and modacks when exactly-once +delivery is enabled.""" -Accounting for some overhead, we should thus only send a maximum of 2500 ACK -IDs at a time. -""" +_MAX_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS = 10 * 60 +"""The maximum amount of time in seconds to retry failed acks and modacks when +exactly-once delivery is enabled.""" class Dispatcher(object): - def __init__(self, manager, queue): + def __init__(self, manager: "StreamingPullManager", queue: "queue.Queue"): self._manager = manager self._queue = queue - self._thread = None + self._thread: Optional[threading.Thread] = None self._operational_lock = threading.Lock() - def start(self): + def start(self) -> None: """Start a thread to dispatch requests queued up by callbacks. + Spawns a thread to run :meth:`dispatch_callback`. """ with self._operational_lock: @@ -78,7 +107,7 @@ def start(self): _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread - def stop(self): + def stop(self) -> None: with self._operational_lock: if self._thread is not None: # Signal the worker to stop by queueing a "poison pill" @@ -87,46 +116,116 @@ def stop(self): self._thread = None - def dispatch_callback(self, items): + def dispatch_callback(self, items: Sequence[RequestItem]) -> None: """Map the callback request to the appropriate gRPC request. Args: - action (str): The method to be invoked. 
- kwargs (Dict[str, Any]): The keyword arguments for the method - specified by ``action``. - - Raises: - ValueError: If ``action`` isn't one of the expected actions - "ack", "drop", "lease", "modify_ack_deadline" or "nack". + items: + Queued requests to dispatch. """ - if not self._manager.is_active: - return - - batched_commands = collections.defaultdict(list) + lease_requests: List[requests.LeaseRequest] = [] + modack_requests: List[requests.ModAckRequest] = [] + ack_requests: List[requests.AckRequest] = [] + nack_requests: List[requests.NackRequest] = [] + drop_requests: List[requests.DropRequest] = [] + + lease_ids = set() + modack_ids = set() + ack_ids = set() + nack_ids = set() + drop_ids = set() + exactly_once_delivery_enabled = self._manager._exactly_once_delivery_enabled() for item in items: - batched_commands[item.__class__].append(item) + if isinstance(item, requests.LeaseRequest): + if ( + item.ack_id not in lease_ids + ): # LeaseRequests have no futures to handle. + lease_ids.add(item.ack_id) + lease_requests.append(item) + elif isinstance(item, requests.ModAckRequest): + if item.ack_id in modack_ids: + self._handle_duplicate_request_future( + exactly_once_delivery_enabled, item + ) + else: + modack_ids.add(item.ack_id) + modack_requests.append(item) + elif isinstance(item, requests.AckRequest): + if item.ack_id in ack_ids: + self._handle_duplicate_request_future( + exactly_once_delivery_enabled, item + ) + else: + ack_ids.add(item.ack_id) + ack_requests.append(item) + elif isinstance(item, requests.NackRequest): + if item.ack_id in nack_ids: + self._handle_duplicate_request_future( + exactly_once_delivery_enabled, item + ) + else: + nack_ids.add(item.ack_id) + nack_requests.append(item) + elif isinstance(item, requests.DropRequest): + if ( + item.ack_id not in drop_ids + ): # DropRequests have no futures to handle. + drop_ids.add(item.ack_id) + drop_requests.append(item) + else: + warnings.warn( + f'Skipping unknown request item of type "{type(item)}"', + category=RuntimeWarning, + ) _LOGGER.debug("Handling %d batched requests", len(items)) - if batched_commands[requests.LeaseRequest]: - self.lease(batched_commands.pop(requests.LeaseRequest)) - if batched_commands[requests.ModAckRequest]: - self.modify_ack_deadline(batched_commands.pop(requests.ModAckRequest)) + if lease_requests: + self.lease(lease_requests) + + if modack_requests: + self.modify_ack_deadline(modack_requests) + # Note: Drop and ack *must* be after lease. It's possible to get both # the lease and the ack/drop request in the same batch. 
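# Sketch of the subscriber-side API that produces the request futures being
# completed above: with exactly-once delivery, Message.ack_with_response()
# returns a future that resolves to AcknowledgeStatus.SUCCESS or raises
# AcknowledgeError; the duplicate path above fails it with ValueError instead.
from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions

def callback(message) -> None:           # passed to subscriber.subscribe(...)
    ack_future = message.ack_with_response()
    try:
        ack_future.result(timeout=60)    # AcknowledgeStatus.SUCCESS
        print("acked", message.message_id)
    except sub_exceptions.AcknowledgeError as e:
        print("ack failed with status:", e.error_code)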
- if batched_commands[requests.AckRequest]: - self.ack(batched_commands.pop(requests.AckRequest)) - if batched_commands[requests.NackRequest]: - self.nack(batched_commands.pop(requests.NackRequest)) - if batched_commands[requests.DropRequest]: - self.drop(batched_commands.pop(requests.DropRequest)) - - def ack(self, items): + if ack_requests: + self.ack(ack_requests) + + if nack_requests: + self.nack(nack_requests) + + if drop_requests: + self.drop(drop_requests) + + def _handle_duplicate_request_future( + self, + exactly_once_delivery_enabled: bool, + item: Union[requests.AckRequest, requests.ModAckRequest, requests.NackRequest], + ) -> None: + _LOGGER.debug( + "This is a duplicate %s with the same ack_id: %s.", + type(item), + item.ack_id, + ) + if item.future: + if exactly_once_delivery_enabled: + item.future.set_exception( + ValueError(f"Duplicate ack_id for {type(item)}") + ) + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + else: + # When exactly-once delivery is NOT enabled, acks/modacks are considered + # best-effort, so the future should succeed even though this is a duplicate. + item.future.set_result(AcknowledgeStatus.SUCCESS) + + def ack(self, items: Sequence[requests.AckRequest]) -> None: """Acknowledge the given messages. Args: - items(Sequence[AckRequest]): The items to acknowledge. + items: The items to acknowledge. """ # If we got timing information, add it to the histogram. for item in items: @@ -136,64 +235,424 @@ def ack(self, items): # We must potentially split the request into multiple smaller requests # to avoid the server-side max request size limit. - ack_ids = (item.ack_id for item in items) + items_gen = iter(items) + ack_ids_gen = (item.ack_id for item in items) total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) + subscription_id: Optional[str] = None + project_id: Optional[str] = None + for item in items: + if item.opentelemetry_data: + item.opentelemetry_data.add_subscribe_span_event("ack start") + if subscription_id is None: + subscription_id = item.opentelemetry_data.subscription_id + if project_id is None: + project_id = item.opentelemetry_data.project_id for _ in range(total_chunks): - request = types.StreamingPullRequest( - ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE) + ack_reqs_dict = { + req.ack_id: req + for req in itertools.islice(items_gen, _ACK_IDS_BATCH_SIZE) + } + + subscribe_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + for ack_req in ack_reqs_dict.values(): + if ack_req.opentelemetry_data: + subscribe_span: Optional[ + trace.Span + ] = ack_req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + ack_span: Optional[trace.Span] = None + if subscription_id and project_id: + ack_span = start_ack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + ) + if ( + ack_span and ack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + ack_span_context: trace.SpanContext = ack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=ack_span_context, + attributes={ + "messaging.operation.name": "ack", + }, + ) + + requests_completed, requests_to_retry = self._manager.send_unary_ack( + ack_ids=list(itertools.islice(ack_ids_gen, 
_ACK_IDS_BATCH_SIZE)), + ack_reqs_dict=ack_reqs_dict, + ) + if ack_span: + ack_span.end() + + for completed_ack in requests_completed: + if completed_ack.opentelemetry_data: + completed_ack.opentelemetry_data.add_subscribe_span_event("ack end") + completed_ack.opentelemetry_data.set_subscribe_span_result("acked") + completed_ack.opentelemetry_data.end_subscribe_span() + + # Remove the completed messages from lease management. + self.drop(requests_completed) + + # Retry on a separate thread so the dispatcher thread isn't blocked + # by sleeps. + if requests_to_retry: + self._start_retry_thread( + "Thread-RetryAcks", + functools.partial(self._retry_acks, requests_to_retry), + ) + + def _start_retry_thread(self, thread_name, thread_target): + # note: if the thread is *not* a daemon, a memory leak exists due to a cpython issue. + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-830092418 + retry_thread = threading.Thread( + name=thread_name, + target=thread_target, + daemon=True, + ) + # The thread finishes when the requests succeed or eventually fail with + # a back-end timeout error or other permanent failure. + retry_thread.start() + + def _retry_acks(self, requests_to_retry: List[requests.AckRequest]): + retry_delay_gen = exponential_sleep_generator( + initial=_MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + maximum=_MAX_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + ) + while requests_to_retry: + time_to_wait = next(retry_delay_gen) + _LOGGER.debug( + "Retrying {len(requests_to_retry)} ack(s) after delay of " + + str(time_to_wait) + + " seconds" + ) + time.sleep(time_to_wait) + + ack_reqs_dict = {req.ack_id: req for req in requests_to_retry} + subscription_id: Optional[str] = None + project_id: Optional[str] = None + subscribe_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + for req in requests_to_retry: + if req.opentelemetry_data: + req.opentelemetry_data.add_subscribe_span_event("ack start") + if subscription_id is None: + subscription_id = req.opentelemetry_data.subscription_id + if project_id is None: + project_id = req.opentelemetry_data.project_id + subscribe_span: Optional[ + trace.Span + ] = req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + ack_span: Optional[trace.Span] = None + if subscription_id and project_id: + ack_span = start_ack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + ) + if ( + ack_span and ack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + ack_span_context: trace.SpanContext = ack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=ack_span_context, + attributes={ + "messaging.operation.name": "ack", + }, + ) + + requests_completed, requests_to_retry = self._manager.send_unary_ack( + ack_ids=[req.ack_id for req in requests_to_retry], + ack_reqs_dict=ack_reqs_dict, ) - self._manager.send(request) - - # Remove the message from lease management. 
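# The chunking idiom used above, in isolation: itertools.islice over a
# *shared* iterator consumes it in place, so each pass yields the next batch
# without rescanning the sequence.
import itertools
import math

items = [f"ack-{i}" for i in range(7)]
BATCH = 3
items_gen = iter(items)
chunks = [
    list(itertools.islice(items_gen, BATCH))
    for _ in range(math.ceil(len(items) / BATCH))
]
assert chunks == [["ack-0", "ack-1", "ack-2"],
                  ["ack-3", "ack-4", "ack-5"],
                  ["ack-6"]]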
- self.drop(items) - def drop(self, items): + if ack_span: + ack_span.end() + + for completed_ack in requests_completed: + if completed_ack.opentelemetry_data: + completed_ack.opentelemetry_data.add_subscribe_span_event("ack end") + completed_ack.opentelemetry_data.set_subscribe_span_result("acked") + completed_ack.opentelemetry_data.end_subscribe_span() + + assert ( + len(requests_to_retry) <= _ACK_IDS_BATCH_SIZE + ), "Too many requests to be retried." + # Remove the completed messages from lease management. + self.drop(requests_completed) + + def drop( + self, + items: Sequence[ + Union[requests.AckRequest, requests.DropRequest, requests.NackRequest] + ], + ) -> None: """Remove the given messages from lease management. Args: - items(Sequence[DropRequest]): The items to drop. + items: The items to drop. """ + assert self._manager.leaser is not None self._manager.leaser.remove(items) ordering_keys = (k.ordering_key for k in items if k.ordering_key) self._manager.activate_ordering_keys(ordering_keys) self._manager.maybe_resume_consumer() - def lease(self, items): + def lease(self, items: Sequence[requests.LeaseRequest]) -> None: """Add the given messages to lease management. Args: - items(Sequence[LeaseRequest]): The items to lease. + items: The items to lease. """ + assert self._manager.leaser is not None self._manager.leaser.add(items) self._manager.maybe_pause_consumer() - def modify_ack_deadline(self, items): + def modify_ack_deadline( + self, + items: Sequence[requests.ModAckRequest], + default_deadline: Optional[float] = None, + ) -> None: """Modify the ack deadline for the given messages. Args: - items(Sequence[ModAckRequest]): The items to modify. + items: The items to modify. """ # We must potentially split the request into multiple smaller requests # to avoid the server-side max request size limit. 
- ack_ids = (item.ack_id for item in items) - seconds = (item.seconds for item in items) + items_gen = iter(items) + ack_ids_gen = (item.ack_id for item in items) + deadline_seconds_gen = (item.seconds for item in items) total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) + subscription_id: Optional[str] = None + project_id: Optional[str] = None + + for item in items: + if item.opentelemetry_data: + if math.isclose(item.seconds, 0): + item.opentelemetry_data.add_subscribe_span_event("nack start") + if subscription_id is None: + subscription_id = item.opentelemetry_data.subscription_id + if project_id is None: + project_id = item.opentelemetry_data.project_id + else: + item.opentelemetry_data.add_subscribe_span_event("modack start") for _ in range(total_chunks): - request = types.StreamingPullRequest( - modify_deadline_ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE), - modify_deadline_seconds=itertools.islice(seconds, _ACK_IDS_BATCH_SIZE), + ack_reqs_dict = { + req.ack_id: req + for req in itertools.islice(items_gen, _ACK_IDS_BATCH_SIZE) + } + subscribe_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + for ack_req in ack_reqs_dict.values(): + if ack_req.opentelemetry_data and math.isclose(ack_req.seconds, 0): + subscribe_span: Optional[ + trace.Span + ] = ack_req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + nack_span: Optional[trace.Span] = None + if subscription_id and project_id: + nack_span = start_nack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + ) + if ( + nack_span and nack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + nack_span_context: trace.SpanContext = nack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=nack_span_context, + attributes={ + "messaging.operation.name": "nack", + }, + ) + requests_to_retry: List[requests.ModAckRequest] + requests_completed: Optional[List[requests.ModAckRequest]] = None + if default_deadline is None: + # no further work needs to be done for `requests_to_retry` + requests_completed, requests_to_retry = self._manager.send_unary_modack( + modify_deadline_ack_ids=list( + itertools.islice(ack_ids_gen, _ACK_IDS_BATCH_SIZE) + ), + modify_deadline_seconds=list( + itertools.islice(deadline_seconds_gen, _ACK_IDS_BATCH_SIZE) + ), + ack_reqs_dict=ack_reqs_dict, + default_deadline=None, + ) + else: + requests_completed, requests_to_retry = self._manager.send_unary_modack( + modify_deadline_ack_ids=itertools.islice( + ack_ids_gen, _ACK_IDS_BATCH_SIZE + ), + modify_deadline_seconds=None, + ack_reqs_dict=ack_reqs_dict, + default_deadline=default_deadline, + ) + if nack_span: + nack_span.end() + assert ( + len(requests_to_retry) <= _ACK_IDS_BATCH_SIZE + ), "Too many requests to be retried." + + for completed_modack in requests_completed: + if completed_modack.opentelemetry_data: + # nack is a modack with 0 extension seconds. 
+ if math.isclose(completed_modack.seconds, 0): + completed_modack.opentelemetry_data.set_subscribe_span_result( + "nacked" + ) + completed_modack.opentelemetry_data.add_subscribe_span_event( + "nack end" + ) + completed_modack.opentelemetry_data.end_subscribe_span() + else: + completed_modack.opentelemetry_data.add_subscribe_span_event( + "modack end" + ) + + # Retry on a separate thread so the dispatcher thread isn't blocked + # by sleeps. + if requests_to_retry: + self._start_retry_thread( + "Thread-RetryModAcks", + functools.partial(self._retry_modacks, requests_to_retry), + ) + + def _retry_modacks(self, requests_to_retry): + retry_delay_gen = exponential_sleep_generator( + initial=_MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + maximum=_MAX_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + ) + while requests_to_retry: + time_to_wait = next(retry_delay_gen) + _LOGGER.debug( + "Retrying {len(requests_to_retry)} modack(s) after delay of " + + str(time_to_wait) + + " seconds" ) - self._manager.send(request) - - def nack(self, items): + time.sleep(time_to_wait) + + ack_reqs_dict = {req.ack_id: req for req in requests_to_retry} + + subscription_id = None + project_id = None + subscribe_links = [] + subscribe_spans = [] + for ack_req in ack_reqs_dict.values(): + if ack_req.opentelemetry_data and math.isclose(ack_req.seconds, 0): + if subscription_id is None: + subscription_id = ack_req.opentelemetry_data.subscription_id + if project_id is None: + project_id = ack_req.opentelemetry_data.project_id + subscribe_span = ack_req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + nack_span = None + if subscription_id and project_id: + nack_span = start_nack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + ) + if ( + nack_span and nack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + nack_span_context: trace.SpanContext = nack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=nack_span_context, + attributes={ + "messaging.operation.name": "nack", + }, + ) + requests_completed, requests_to_retry = self._manager.send_unary_modack( + modify_deadline_ack_ids=[req.ack_id for req in requests_to_retry], + modify_deadline_seconds=[req.seconds for req in requests_to_retry], + ack_reqs_dict=ack_reqs_dict, + ) + if nack_span: + nack_span.end() + for completed_modack in requests_completed: + if completed_modack.opentelemetry_data: + # nack is a modack with 0 extension seconds. + if math.isclose(completed_modack.seconds, 0): + completed_modack.opentelemetry_data.set_subscribe_span_result( + "nacked" + ) + completed_modack.opentelemetry_data.add_subscribe_span_event( + "nack end" + ) + completed_modack.opentelemetry_data.end_subscribe_span() + else: + completed_modack.opentelemetry_data.add_subscribe_span_event( + "modack end" + ) + + def nack(self, items: Sequence[requests.NackRequest]) -> None: """Explicitly deny receipt of messages. Args: - items(Sequence[NackRequest]): The items to deny. + items: The items to deny. 
""" self.modify_ack_deadline( - [requests.ModAckRequest(ack_id=item.ack_id, seconds=0) for item in items] + [ + requests.ModAckRequest( + ack_id=item.ack_id, + seconds=0, + future=item.future, + opentelemetry_data=item.opentelemetry_data, + ) + for item in items + ] + ) + self.drop( + [ + requests.DropRequest( + ack_id=item.ack_id, + byte_size=item.byte_size, + ordering_key=item.ordering_key, + ) + for item in items + ] ) - self.drop([requests.DropRequest(*item) for item in items]) diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index 9cd84a1e2..a053d5fe4 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -16,6 +16,13 @@ import logging import threading +import typing +from typing import Optional + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) _LOGGER = logging.getLogger(__name__) @@ -27,23 +34,23 @@ class Heartbeater(object): - def __init__(self, manager, period=_DEFAULT_PERIOD): - self._thread = None + def __init__(self, manager: "StreamingPullManager", period: int = _DEFAULT_PERIOD): + self._thread: Optional[threading.Thread] = None self._operational_lock = threading.Lock() self._manager = manager self._stop_event = threading.Event() self._period = period - def heartbeat(self): - """Periodically send heartbeats.""" - while self._manager.is_active and not self._stop_event.is_set(): - self._manager.heartbeat() - _LOGGER.debug("Sent heartbeat.") + def heartbeat(self) -> None: + """Periodically send streaming pull heartbeats.""" + while not self._stop_event.is_set(): + if self._manager.heartbeat(): + _LOGGER.debug("Sent heartbeat.") self._stop_event.wait(timeout=self._period) - _LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME) + _LOGGER.debug("%s exiting.", _HEARTBEAT_WORKER_NAME) - def start(self): + def start(self) -> None: with self._operational_lock: if self._thread is not None: raise ValueError("Heartbeater is already running.") @@ -58,7 +65,7 @@ def start(self): _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread - def stop(self): + def stop(self) -> None: with self._operational_lock: self._stop_event.set() diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py b/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index 80ad58738..a7e18a88e 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -13,11 +13,11 @@ # limitations under the License. import logging +import queue import time +from typing import Any, Callable, List, Sequence, Optional import uuid -from six.moves import queue - __all__ = ("QueueCallbackWorker", "STOP") @@ -31,22 +31,25 @@ STOP = uuid.uuid4() -def _get_many(queue_, max_items=None, max_latency=0): +def _get_many( + queue_: queue.Queue, max_items: Optional[int] = None, max_latency: float = 0 +) -> List[Any]: """Get multiple items from a Queue. Gets at least one (blocking) and at most ``max_items`` items (non-blocking) from a given Queue. Does not mark the items as done. Args: - queue_ (~queue.Queue`): The Queue to get items from. - max_items (int): The maximum number of items to get. If ``None``, then - all available items in the queue are returned. 
- max_latency (float): The maximum number of seconds to wait for more - than one item from a queue. This number includes the time required - to retrieve the first item. + queue_: The Queue to get items from. + max_items: + The maximum number of items to get. If ``None``, then all available items + in the queue are returned. + max_latency: + The maximum number of seconds to wait for more than one item from a queue. + This number includes the time required to retrieve the first item. Returns: - Sequence[Any]: A sequence of items retrieved from the queue. + A sequence of items retrieved from the queue. """ start = time.time() # Always return at least one item. @@ -68,26 +71,33 @@ class QueueCallbackWorker(object): :attr:`STOP`. Args: - queue (~queue.Queue): A Queue instance, appropriate for crossing the - concurrency boundary implemented by ``executor``. Items will - be popped off (with a blocking ``get()``) until :attr:`STOP` - is encountered. - callback (Callable[Sequence[Any], Any]): A callback that can process - items pulled off of the queue. Multiple items will be passed to - the callback in batches. - max_items (int): The maximum amount of items that will be passed to the - callback at a time. - max_latency (float): The maximum amount of time in seconds to wait for - additional items before executing the callback. + queue: + A Queue instance, appropriate for crossing the concurrency boundary + implemented by ``executor``. Items will be popped off (with a blocking + ``get()``) until :attr:`STOP` is encountered. + callback: + A callback that can process items pulled off of the queue. Multiple items + will be passed to the callback in batches. + max_items: + The maximum amount of items that will be passed to the callback at a time. + max_latency: + The maximum amount of time in seconds to wait for additional items before + executing the callback. """ - def __init__(self, queue, callback, max_items=100, max_latency=0): + def __init__( + self, + queue: queue.Queue, + callback: Callable[[Sequence[Any]], Any], + max_items: int = 100, + max_latency: float = 0, + ): self.queue = queue self._callback = callback self.max_items = max_items self.max_latency = max_latency - def __call__(self): + def __call__(self) -> None: continue_ = True while continue_: items = _get_many( diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py b/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py index 29ee6fc61..d922bbf68 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py @@ -12,7 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import absolute_import, division +from typing import Dict, Optional, Union + + +MIN_ACK_DEADLINE = 10 +MAX_ACK_DEADLINE = 600 class Histogram(object): @@ -27,19 +31,20 @@ class Histogram(object): are free to use a different formula. The precision of data stored is to the nearest integer. Additionally, - values outside the range of ``10 <= x <= 600`` are stored as ``10`` or - ``600``, since these are the boundaries of leases in the actual API. + values outside the range of ``MIN_ACK_DEADLINE <= x <= MAX_ACK_DEADLINE`` are stored + as ``MIN_ACK_DEADLINE`` or ``MAX_ACK_DEADLINE``, since these are the boundaries of + leases in the actual API. """ - def __init__(self, data=None): + def __init__(self, data: Optional[Dict[int, int]] = None): """Instantiate the histogram. 
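# Self-contained sketch of the _get_many() contract described above: block
# for the first item, then keep draining until max_items is reached or the
# latency budget (measured from the first get) is exhausted.
import queue
import time
from typing import Any, List, Optional

def get_many(q: "queue.Queue[Any]", max_items: Optional[int] = None,
             max_latency: float = 0) -> List[Any]:
    start = time.time()
    items = [q.get()]                     # always block for at least one item
    while max_items is None or len(items) < max_items:
        try:
            remaining = max(0.0, max_latency - (time.time() - start))
            items.append(q.get(timeout=remaining))
        except queue.Empty:
            break
    return items

q: "queue.Queue[int]" = queue.Queue()
for i in range(5):
    q.put(i)
assert get_many(q, max_items=3) == [0, 1, 2]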
Args: - data (Mapping[str, int]): The data strucure to be used to store - the underlying data. The default is an empty dictionary. - This can be set to a dictionary-like object if required - (for example, if a special object is needed for - concurrency reasons). + data: + The data strucure to be used to store the underlying data. The default + is an empty dictionary. This can be set to a dictionary-like object if + required (for example, if a special object is needed for concurrency + reasons). """ # The data is stored as a dictionary, with the keys being the # value being added and the values being the number of times that @@ -55,23 +60,19 @@ def __init__(self, data=None): self._data = data self._len = 0 - def __len__(self): + def __len__(self) -> int: """Return the total number of data points in this histogram. This is cached on a separate counter (rather than computing it using ``sum([v for v in self._data.values()])``) to optimize lookup. Returns: - int: The total number of data points in this histogram. + The total number of data points in this histogram. """ return self._len - def __contains__(self, needle): - """Return True if needle is present in the histogram, False otherwise. - - Returns: - bool: True or False - """ + def __contains__(self, needle: int) -> bool: + """Return ``True`` if needle is present in the histogram, ``False`` otherwise.""" return needle in self._data def __repr__(self): @@ -80,59 +81,63 @@ def __repr__(self): ) @property - def max(self): + def max(self) -> int: """Return the maximum value in this histogram. - If there are no values in the histogram at all, return 600. + If there are no values in the histogram at all, return ``MAX_ACK_DEADLINE``. Returns: - int: The maximum value in the histogram. + The maximum value in the histogram. """ if len(self._data) == 0: - return 600 + return MAX_ACK_DEADLINE return next(iter(reversed(sorted(self._data.keys())))) @property - def min(self): + def min(self) -> int: """Return the minimum value in this histogram. - If there are no values in the histogram at all, return 10. + If there are no values in the histogram at all, return ``MIN_ACK_DEADLINE``. Returns: - int: The minimum value in the histogram. + The minimum value in the histogram. """ if len(self._data) == 0: - return 10 + return MIN_ACK_DEADLINE return next(iter(sorted(self._data.keys()))) - def add(self, value): + def add(self, value: Union[int, float]) -> None: """Add the value to this histogram. Args: - value (int): The value. Values outside of ``10 <= x <= 600`` - will be raised to ``10`` or reduced to ``600``. + value: + The value. Values outside of + ``MIN_ACK_DEADLINE <= x <= MAX_ACK_DEADLINE`` + will be raised to ``MIN_ACK_DEADLINE`` or reduced to + ``MAX_ACK_DEADLINE``. """ # If the value is out of bounds, bring it in bounds. value = int(value) - if value < 10: - value = 10 - if value > 600: - value = 600 + if value < MIN_ACK_DEADLINE: + value = MIN_ACK_DEADLINE + elif value > MAX_ACK_DEADLINE: + value = MAX_ACK_DEADLINE # Add the value to the histogram's data dictionary. self._data.setdefault(value, 0) self._data[value] += 1 self._len += 1 - def percentile(self, percent): + def percentile(self, percent: Union[int, float]) -> int: """Return the value that is the Nth precentile in the histogram. Args: - percent (Union[int, float]): The precentile being sought. The - default consumer implementations use consistently use ``99``. + percent: + The precentile being sought. The default consumer implementations + consistently use ``99``. 
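# Worked example of the semantics above with a toy data dict (stand-in
# names): values are clamped into [MIN_ACK_DEADLINE, MAX_ACK_DEADLINE], and
# percentile() walks bucket counts from the largest value downward.
MIN_ACK_DEADLINE, MAX_ACK_DEADLINE = 10, 600

def clamp(value: float) -> int:
    return min(max(int(value), MIN_ACK_DEADLINE), MAX_ACK_DEADLINE)

def percentile(data: dict, percent: float) -> int:
    percent = min(percent, 100)
    total = sum(data.values())
    target = total - total * (percent / 100)   # samples allowed above result
    seen = 0
    for value in sorted(data, reverse=True):
        seen += data[value]
        if seen >= target:
            return value
    return MIN_ACK_DEADLINE                    # no data: shortest deadline

data = {10: 7, 20: 2, 30: 1}                   # ten ack-time samples
assert clamp(3) == 10 and clamp(9000) == 600
assert percentile(data, 99) == 30              # p99 of 10 samples
assert percentile(data, 50) == 10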
Returns: - int: The value corresponding to the requested percentile. + The value corresponding to the requested percentile. """ # Sanity check: Any value over 100 should become 100. if percent >= 100: @@ -150,5 +155,5 @@ def percentile(self, percent): return k # The only way to get here is if there was no data. - # In this case, just return 10 seconds. - return 10 + # In this case, just return the shortest possible deadline. + return MIN_ACK_DEADLINE diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 5830680da..5abdb7081 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -14,30 +14,51 @@ from __future__ import absolute_import -import collections import copy import logging import random import threading import time +import typing +from typing import Dict, Iterable, Optional, Union -import six +from google.cloud.pubsub_v1.subscriber._protocol.dispatcher import _MAX_BATCH_LATENCY +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) + +try: + from collections.abc import KeysView + + KeysView[None] # KeysView is only subscriptable in Python 3.9+ +except TypeError: + # Deprecated since Python 3.9, thus only use as a fallback in older Python versions + from typing import KeysView from google.cloud.pubsub_v1.subscriber._protocol import requests +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) + _LOGGER = logging.getLogger(__name__) _LEASE_WORKER_NAME = "Thread-LeaseMaintainer" -_LeasedMessage = collections.namedtuple( - "_LeasedMessage", ["sent_time", "size", "ordering_key"] -) +class _LeasedMessage(typing.NamedTuple): + sent_time: float + """The local time when ACK ID was initially leased in seconds since the epoch.""" + + size: int + ordering_key: Optional[str] + opentelemetry_data: Optional[SubscribeOpenTelemetry] class Leaser(object): - def __init__(self, manager): - self._thread = None + def __init__(self, manager: "StreamingPullManager"): + self._thread: Optional[threading.Thread] = None self._manager = manager # a lock used for start/stop operations, protecting the _thread attribute @@ -48,30 +69,29 @@ def __init__(self, manager): self._add_remove_lock = threading.Lock() # Dict of ack_id -> _LeasedMessage - self._leased_messages = {} - """dict[str, float]: A mapping of ack IDs to the local time when the - ack ID was initially leased in seconds since the epoch.""" + self._leased_messages: Dict[str, _LeasedMessage] = {} + self._bytes = 0 - """int: The total number of bytes consumed by leased messages.""" + """The total number of bytes consumed by leased messages.""" self._stop_event = threading.Event() @property - def message_count(self): - """int: The number of leased messages.""" + def message_count(self) -> int: + """The number of leased messages.""" return len(self._leased_messages) @property - def ack_ids(self): - """Sequence[str]: The ack IDs of all leased messages.""" + def ack_ids(self) -> KeysView[str]: + """The ack IDs of all leased messages.""" return self._leased_messages.keys() @property - def bytes(self): - """int: The total size, in bytes, of all leased messages.""" + def bytes(self) -> int: + """The total size, in bytes, of all leased messages.""" return self._bytes - def add(self, items): + def add(self, items: Iterable[requests.LeaseRequest]) -> None: """Add 
messages to be managed by the leaser.""" with self._add_remove_lock: for item in items: @@ -82,17 +102,17 @@ def add(self, items): sent_time=float("inf"), size=item.byte_size, ordering_key=item.ordering_key, + opentelemetry_data=item.opentelemetry_data, ) self._bytes += item.byte_size else: _LOGGER.debug("Message %s is already lease managed", item.ack_id) - def start_lease_expiry_timer(self, ack_ids): + def start_lease_expiry_timer(self, ack_ids: Iterable[str]) -> None: """Start the lease expiry timer for `items`. Args: - items (Sequence[str]): Sequence of ack-ids for which to start - lease expiry timers. + items: Sequence of ack-ids for which to start lease expiry timers. """ with self._add_remove_lock: for ack_id in ack_ids: @@ -104,7 +124,12 @@ def start_lease_expiry_timer(self, ack_ids): sent_time=time.time() ) - def remove(self, items): + def remove( + self, + items: Iterable[ + Union[requests.AckRequest, requests.DropRequest, requests.NackRequest] + ], + ) -> None: """Remove messages from lease management.""" with self._add_remove_lock: # Remove the ack ID from lease management, and decrement the @@ -119,18 +144,20 @@ def remove(self, items): _LOGGER.debug("Bytes was unexpectedly negative: %d", self._bytes) self._bytes = 0 - def maintain_leases(self): + def maintain_leases(self) -> None: """Maintain all of the leases being managed. This method modifies the ack deadline for all of the managed ack IDs, then waits for most of that time (but with jitter), and repeats. """ - while self._manager.is_active and not self._stop_event.is_set(): + while not self._stop_event.is_set(): # Determine the appropriate duration for the lease. This is # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. - deadline = self._manager.ack_deadline + # Also update the deadline currently used if enough new ACK data has been + # gathered since the last deadline update. + deadline = self._manager._obtain_ack_deadline(maybe_update=True) _LOGGER.debug("The current deadline value is %d seconds.", deadline) # Make a copy of the leased messages. This is needed because it's @@ -144,7 +171,7 @@ def maintain_leases(self): cutoff = time.time() - self._manager.flow_control.max_lease_duration to_drop = [ requests.DropRequest(ack_id, item.size, item.ordering_key) - for ack_id, item in six.iteritems(leased_messages) + for ack_id, item in leased_messages.items() if item.sent_time < cutoff ] @@ -152,6 +179,18 @@ def maintain_leases(self): _LOGGER.warning( "Dropping %s items because they were leased too long.", len(to_drop) ) + assert self._manager.dispatcher is not None + for drop_msg in to_drop: + leased_message = leased_messages.get(drop_msg.ack_id) + if leased_message and leased_message.opentelemetry_data: + leased_message.opentelemetry_data.add_process_span_event( + "expired" + ) + leased_message.opentelemetry_data.end_process_span() + leased_message.opentelemetry_data.set_subscribe_span_result( + "expired" + ) + leased_message.opentelemetry_data.end_subscribe_span() self._manager.dispatcher.drop(to_drop) # Remove dropped items from our copy of the leased messages (they @@ -160,10 +199,11 @@ def maintain_leases(self): for item in to_drop: leased_messages.pop(item.ack_id) - # Create a streaming pull request. + # Create a modack request. # We do not actually call `modify_ack_deadline` over and over # because it is more efficient to make a single request. 
ack_ids = leased_messages.keys() + expired_ack_ids = set() if ack_ids: _LOGGER.debug("Renewing lease for %d ack IDs.", len(ack_ids)) @@ -172,23 +212,60 @@ def maintain_leases(self): # without any sort of race condition would require a # way for ``send_request`` to fail when the consumer # is inactive. - self._manager.dispatcher.modify_ack_deadline( - [requests.ModAckRequest(ack_id, deadline) for ack_id in ack_ids] + assert self._manager.dispatcher is not None + ack_id_gen = (ack_id for ack_id in ack_ids) + opentelemetry_data = [ + message.opentelemetry_data + for message in list(leased_messages.values()) + if message.opentelemetry_data + ] + expired_ack_ids = self._manager._send_lease_modacks( + ack_id_gen, + deadline, + opentelemetry_data, ) + start_time = time.time() + # If exactly once delivery is enabled, we should drop all expired ack_ids from lease management. + if self._manager._exactly_once_delivery_enabled() and len(expired_ack_ids): + assert self._manager.dispatcher is not None + for ack_id in expired_ack_ids: + msg = leased_messages.get(ack_id) + if msg and msg.opentelemetry_data: + msg.opentelemetry_data.add_process_span_event("expired") + msg.opentelemetry_data.end_process_span() + msg.opentelemetry_data.set_subscribe_span_result("expired") + msg.opentelemetry_data.end_subscribe_span() + self._manager.dispatcher.drop( + [ + requests.DropRequest( + ack_id, + leased_messages.get(ack_id).size, # type: ignore + leased_messages.get(ack_id).ordering_key, # type: ignore + ) + for ack_id in expired_ack_ids + if ack_id in leased_messages + ] + ) # Now wait an appropriate period of time and do this again. # # We determine the appropriate period of time based on a random - # period between 0 seconds and 90% of the lease. This use of - # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases + # period between: + # minimum: MAX_BATCH_LATENCY (to prevent duplicate modacks being created in one batch) + # maximum: 90% of the deadline + # This maximum time attempts to prevent ack expiration before new lease modacks arrive at the server. + # This use of jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. - snooze = random.uniform(0.0, deadline * 0.9) + # If we spent any time iterating over expired acks, we should subtract this from the deadline. + snooze = random.uniform( + _MAX_BATCH_LATENCY, (deadline * 0.9 - (time.time() - start_time)) + ) _LOGGER.debug("Snoozing lease management for %f seconds.", snooze) self._stop_event.wait(timeout=snooze) - _LOGGER.info("%s exiting.", _LEASE_WORKER_NAME) + _LOGGER.debug("%s exiting.", _LEASE_WORKER_NAME) - def start(self): + def start(self) -> None: with self._operational_lock: if self._thread is not None: raise ValueError("Leaser is already running.") @@ -203,7 +280,7 @@ def start(self): _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread - def stop(self): + def stop(self) -> None: with self._operational_lock: self._stop_event.set() diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py b/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py index bab15f218..3d4c2a392 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py @@ -13,11 +13,19 @@ # limitations under the License. 
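# Two pieces of the leaser's maintenance loop above, in isolation (toy
# values, assumed names): the expiry cutoff that drops messages held longer
# than max_lease_duration, and the jittered snooze bounded below by the
# dispatcher's max batch latency and above by 90% of the deadline.
import random
import time

MAX_BATCH_LATENCY = 0.01  # stand-in for dispatcher._MAX_BATCH_LATENCY

def expired_ack_ids(leased: dict, max_lease_duration: float) -> list:
    cutoff = time.time() - max_lease_duration
    return [ack_id for ack_id, sent_time in leased.items()
            if sent_time < cutoff]

def snooze_for(deadline: float, elapsed: float) -> float:
    return random.uniform(MAX_BATCH_LATENCY, deadline * 0.9 - elapsed)

now = time.time()
assert expired_ack_ids({"old": now - 7200, "new": now}, 3600) == ["old"]
snooze = snooze_for(deadline=60, elapsed=0.5)
assert MAX_BATCH_LATENCY <= snooze <= 60 * 0.9 - 0.5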
import collections +import logging +import typing +from typing import Any, Callable, Iterable, Optional + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1 import subscriber + + +_LOGGER = logging.getLogger(__name__) class MessagesOnHold(object): - """Tracks messages on hold by ordering key. Not thread-safe. - """ + """Tracks messages on hold by ordering key. Not thread-safe.""" def __init__(self): self._size = 0 @@ -41,27 +49,25 @@ def __init__(self): self._pending_ordered_messages = {} @property - def size(self): - """Return the number of messages on hold across ordered and unordered - messages. + def size(self) -> int: + """Return the number of messages on hold across ordered and unordered messages. Note that this object may still store information about ordered messages in flight even if size is zero. Returns: - int: The size value. + The size value. """ return self._size - def get(self): - """ Gets a message from the on-hold queue. A message with an ordering + def get(self) -> Optional["subscriber.message.Message"]: + """Gets a message from the on-hold queue. A message with an ordering key wont be returned if there's another message with the same key in flight. Returns: - Optional[google.cloud.pubsub_v1.subscriber.message.Message]: A message - that hasn't been sent to the user yet or None if there are no - messages available. + A message that hasn't been sent to the user yet or ``None`` if there are no + messages available. """ while self._messages_on_hold: msg = self._messages_on_hold.popleft() @@ -88,17 +94,22 @@ def get(self): return None - def put(self, message): + def put(self, message: "subscriber.message.Message") -> None: """Put a message on hold. Args: - message (google.cloud.pubsub_v1.subscriber.message.Message): The - message to put on hold. + message: The message to put on hold. """ + if message.opentelemetry_data: + message.opentelemetry_data.start_subscribe_scheduler_span() self._messages_on_hold.append(message) self._size = self._size + 1 - def activate_ordering_keys(self, ordering_keys, schedule_message_callback): + def activate_ordering_keys( + self, + ordering_keys: Iterable[str], + schedule_message_callback: Callable[["subscriber.message.Message"], Any], + ) -> None: """Send the next message in the queue for each of the passed-in ordering keys, if they exist. Clean up state for keys that no longer have any queued messages. @@ -107,15 +118,18 @@ def activate_ordering_keys(self, ordering_keys, schedule_message_callback): detail about the impact of this method on load. Args: - ordering_keys(Sequence[str]): A sequence of ordering keys to - activate. May be empty. - schedule_message_callback(Callable[google.cloud.pubsub_v1.subscriber.message.Message]): + ordering_keys: + The ordering keys to activate. May be empty, or contain duplicates. + schedule_message_callback: The callback to call to schedule a message to be sent to the user. """ for key in ordering_keys: - assert ( - self._pending_ordered_messages.get(key) is not None - ), "A message queue should exist for every ordered message in flight." + pending_ordered_messages = self._pending_ordered_messages.get(key) + if pending_ordered_messages is None: + _LOGGER.warning( + "No message queue exists for message ordering key: %s.", key + ) + continue next_msg = self._get_next_for_ordering_key(key) if next_msg: # Schedule the next message because the previous was dropped. 
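# Toy model of the ordering-key bookkeeping above (assumed names, and
# simplified: the real class also tracks in-flight messages per key): held
# messages wait in a per-key deque and are released one at a time, in order.
import collections

pending = {}                              # ordering_key -> deque of messages

def put_ordered(key: str, msg: str) -> None:
    pending.setdefault(key, collections.deque()).append(msg)

def activate(key: str):
    q = pending.get(key)
    if q:                                 # send the next queued message
        return q.popleft()
    pending.pop(key, None)                # empty: clean up the key's state
    return None

put_ordered("k", "m1")
put_ordered("k", "m2")
assert activate("k") == "m1"
assert activate("k") == "m2"
assert activate("k") is None and "k" not in pending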
@@ -126,18 +140,19 @@ def activate_ordering_keys(self, ordering_keys, schedule_message_callback): # No more messages for this ordering key, so do clean-up. self._clean_up_ordering_key(key) - def _get_next_for_ordering_key(self, ordering_key): + def _get_next_for_ordering_key( + self, ordering_key: str + ) -> Optional["subscriber.message.Message"]: """Get next message for ordering key. The client should call clean_up_ordering_key() if this method returns None. Args: - ordering_key (str): Ordering key for which to get the next message. + ordering_key: Ordering key for which to get the next message. Returns: - google.cloud.pubsub_v1.subscriber.message.Message|None: The - next message for this ordering key or None if there aren't any. + The next message for this ordering key or None if there aren't any. """ queue_for_key = self._pending_ordered_messages.get(ordering_key) if queue_for_key: @@ -145,18 +160,23 @@ def _get_next_for_ordering_key(self, ordering_key): return queue_for_key.popleft() return None - def _clean_up_ordering_key(self, ordering_key): + def _clean_up_ordering_key(self, ordering_key: str) -> None: """Clean up state for an ordering key with no pending messages. - Args: - ordering_key (str): The ordering key to clean up. + Args + ordering_key: The ordering key to clean up. """ message_queue = self._pending_ordered_messages.get(ordering_key) - assert ( - message_queue is not None - ), "Cleaning up ordering key that does not exist." - assert not len(message_queue), ( - "Ordering key must only be removed if there are no messages " - "left for that key." - ) + if message_queue is None: + _LOGGER.warning( + "Tried to clean up ordering key that does not exist: %s", ordering_key + ) + return + if len(message_queue) > 0: + _LOGGER.warning( + "Tried to clean up ordering key: %s with %d messages remaining.", + ordering_key, + len(message_queue), + ) + return del self._pending_ordered_messages[ordering_key] diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/google/cloud/pubsub_v1/subscriber/_protocol/requests.py index 58d53a61d..9a0ba5a50 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/requests.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -12,28 +12,53 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Base class for concurrency policy.""" +import typing +from typing import NamedTuple, Optional -from __future__ import absolute_import, division +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber import futures -import collections # Namedtuples for management requests. Used by the Message class to communicate # items of work back to the policy. 
-AckRequest = collections.namedtuple( - "AckRequest", ["ack_id", "byte_size", "time_to_ack", "ordering_key"] -) +class AckRequest(NamedTuple): + ack_id: str + byte_size: int + time_to_ack: float + ordering_key: Optional[str] + future: Optional["futures.Future"] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None + message_id: Optional[str] = None -DropRequest = collections.namedtuple( - "DropRequest", ["ack_id", "byte_size", "ordering_key"] -) -LeaseRequest = collections.namedtuple( - "LeaseRequest", ["ack_id", "byte_size", "ordering_key"] -) +class DropRequest(NamedTuple): + ack_id: str + byte_size: int + ordering_key: Optional[str] -ModAckRequest = collections.namedtuple("ModAckRequest", ["ack_id", "seconds"]) -NackRequest = collections.namedtuple( - "NackRequest", ["ack_id", "byte_size", "ordering_key"] -) +class LeaseRequest(NamedTuple): + ack_id: str + byte_size: int + ordering_key: Optional[str] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None + + +class ModAckRequest(NamedTuple): + ack_id: str + seconds: float + future: Optional["futures.Future"] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None + message_id: Optional[str] = None + + +class NackRequest(NamedTuple): + ack_id: str + byte_size: int + ordering_key: Optional[str] + future: Optional["futures.Future"] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None diff --git a/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 4e3f24933..5132456a2 100644 --- a/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -16,12 +16,25 @@ import collections import functools +import inspect +import itertools import logging import threading +import typing +from typing import ( + Any, + Dict, + Callable, + Iterable, + List, + Optional, + Set, + Tuple, +) import uuid -import grpc -import six +from opentelemetry import trace +import grpc # type: ignore from google.api_core import bidi from google.api_core import exceptions @@ -32,90 +45,355 @@ from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeError, + AcknowledgeStatus, +) +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) import google.cloud.pubsub_v1.subscriber.message -import google.cloud.pubsub_v1.subscriber.scheduler +from google.cloud.pubsub_v1.subscriber import futures +from google.cloud.pubsub_v1.subscriber.scheduler import ThreadScheduler +from google.pubsub_v1 import types as gapic_types +from grpc_status import rpc_status # type: ignore +from google.rpc.error_details_pb2 import ErrorInfo # type: ignore +from google.rpc import code_pb2 # type: ignore +from google.rpc import status_pb2 +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + start_modack_span, +) + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1 import subscriber + _LOGGER = logging.getLogger(__name__) +_SLOW_ACK_LOGGER = logging.getLogger("slow-ack") +_STREAMS_LOGGER = logging.getLogger("subscriber-streams") +_FLOW_CONTROL_LOGGER = logging.getLogger("subscriber-flow-control") +_CALLBACK_DELIVERY_LOGGER = logging.getLogger("callback-delivery") 
+_CALLBACK_EXCEPTION_LOGGER = logging.getLogger("callback-exceptions")
+_EXPIRY_LOGGER = logging.getLogger("expiry")
+_REGULAR_SHUTDOWN_THREAD_NAME = "Thread-RegularStreamShutdown"
 _RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated"
 
 _RETRYABLE_STREAM_ERRORS = (
+    exceptions.Aborted,
     exceptions.DeadlineExceeded,
-    exceptions.ServiceUnavailable,
+    exceptions.GatewayTimeout,
     exceptions.InternalServerError,
+    exceptions.ResourceExhausted,
+    exceptions.ServiceUnavailable,
     exceptions.Unknown,
-    exceptions.GatewayTimeout,
-    exceptions.Aborted,
 )
 
-_TERMINATING_STREAM_ERRORS = (exceptions.Cancelled,)
+_TERMINATING_STREAM_ERRORS = (
+    exceptions.Cancelled,
+    exceptions.InvalidArgument,
+    exceptions.NotFound,
+    exceptions.PermissionDenied,
+    exceptions.Unauthenticated,
+    exceptions.Unauthorized,
+)
 
 _MAX_LOAD = 1.0
 """The load threshold above which to pause the incoming message stream."""
 
 _RESUME_THRESHOLD = 0.8
 """The load threshold below which to resume the incoming message stream."""
 
+_MIN_ACK_DEADLINE_SECS_WHEN_EXACTLY_ONCE_ENABLED = 60
+"""The minimum ack_deadline, in seconds, for when exactly_once is enabled for
+a subscription. We do this to reduce premature ack expiration.
+"""
+
+_DEFAULT_STREAM_ACK_DEADLINE: float = 60
+"""The default stream ack deadline in seconds."""
+
+_MAX_STREAM_ACK_DEADLINE: float = 600
+"""The maximum stream ack deadline in seconds."""
+
+_MIN_STREAM_ACK_DEADLINE: float = 10
+"""The minimum stream ack deadline in seconds."""
+
+_EXACTLY_ONCE_DELIVERY_TEMPORARY_RETRY_ERRORS = {
+    code_pb2.DEADLINE_EXCEEDED,
+    code_pb2.RESOURCE_EXHAUSTED,
+    code_pb2.ABORTED,
+    code_pb2.INTERNAL,
+    code_pb2.UNAVAILABLE,
+}
+
+# `on_fatal_exception` was added in `google-api-core v2.25.1`, which allows us to inform
+# callers of unrecoverable errors. We can only pass this arg if it's available in the
+# `BackgroundConsumer` spec.
+_SHOULD_USE_ON_FATAL_ERROR_CALLBACK = "on_fatal_exception" in inspect.getfullargspec(
+    bidi.BackgroundConsumer
+)
+
+
+def _wrap_as_exception(maybe_exception: Any) -> BaseException:
+    """Wrap an object as a Python exception, if needed.
+
+    Args:
+        maybe_exception: The object to wrap, usually a gRPC exception class.
+
+    Returns:
+        The argument itself if an instance of ``BaseException``, otherwise
+        the argument represented as an instance of ``Exception`` (sub)class.
+    """
+    if isinstance(maybe_exception, grpc.RpcError):
+        return exceptions.from_grpc_error(maybe_exception)
+    elif isinstance(maybe_exception, BaseException):
+        return maybe_exception
 
-def _maybe_wrap_exception(exception):
-    """Wraps a gRPC exception class, if needed."""
-    if isinstance(exception, grpc.RpcError):
-        return exceptions.from_grpc_error(exception)
-    return exception
+    return Exception(maybe_exception)
 
 
-def _wrap_callback_errors(callback, on_callback_error, message):
+def _wrap_callback_errors(
+    callback: Callable[["google.cloud.pubsub_v1.subscriber.message.Message"], Any],
+    on_callback_error: Callable[[BaseException], Any],
+    message: "google.cloud.pubsub_v1.subscriber.message.Message",
+):
     """Wraps a user callback so that if an exception occurs the message is
     nacked.
 
     Args:
-        callback (Callable[None, Message]): The user callback.
-        message (~Message): The Pub/Sub message.
+        callback: The user callback.
+        on_callback_error: The callback to invoke if ``callback`` raises an exception.
+        message: The Pub/Sub message.
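+
+    A sketch of how the manager typically wires this up (illustrative only;
+    ``user_callback`` and ``on_callback_error`` are assumed to be caller-supplied):
+
+        wrapped = functools.partial(
+            _wrap_callback_errors, user_callback, on_callback_error
+        )
+        scheduler.schedule(wrapped, message)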
""" + _CALLBACK_DELIVERY_LOGGER.debug( + "Message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=%s) received by subscriber callback", + message.message_id, + message.ack_id, + message.ordering_key, + message.exactly_once_enabled, + ) + try: - callback(message) - except Exception as exc: + if message.opentelemetry_data: + message.opentelemetry_data.end_subscribe_concurrency_control_span() + with message.opentelemetry_data: + callback(message) + else: + callback(message) + except BaseException as exc: # Note: the likelihood of this failing is extremely low. This just adds # a message to a queue, so if this doesn't work the world is in an # unrecoverable state and this thread should just bail. - _LOGGER.exception( - "Top-level exception occurred in callback while processing a message" + + _CALLBACK_EXCEPTION_LOGGER.exception( + "Message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=%s)'s callback threw exception, nacking message.", + message.message_id, + message.ack_id, + message.ordering_key, + message.exactly_once_enabled, ) + message.nack() on_callback_error(exc) +def _get_status( + exc: exceptions.GoogleAPICallError, +) -> Optional["status_pb2.Status"]: + if not exc.response: + _LOGGER.debug("No response obj in errored RPC call.") + return None + try: + return rpc_status.from_call(exc.response) + # Possible "If the gRPC call’s code or details are inconsistent + # with the status code and message inside of the + # google.rpc.status.Status" + except ValueError: + _LOGGER.debug("ValueError when parsing ErrorInfo.", exc_info=True) + return None + + +def _get_ack_errors( + exc: exceptions.GoogleAPICallError, +) -> Optional[Dict[str, str]]: + status = _get_status(exc) + if not status: + _LOGGER.debug("Unable to get status of errored RPC.") + return None + for detail in status.details: + info = ErrorInfo() + if not (detail.Is(ErrorInfo.DESCRIPTOR) and detail.Unpack(info)): + _LOGGER.debug("Unable to unpack ErrorInfo.") + return None + return info.metadata + return None + + +def _process_requests( + error_status: Optional["status_pb2.Status"], + ack_reqs_dict: Dict[str, requests.AckRequest], + errors_dict: Optional[Dict[str, str]], + ack_histogram: Optional[histogram.Histogram] = None, + # TODO - Change this param to a Union of Literals when we drop p3.7 support + req_type: str = "ack", +): + """Process requests when exactly-once delivery is enabled by referring to + error_status and errors_dict. + + The errors returned by the server in as `error_status` or in `errors_dict` + are used to complete the request futures in `ack_reqs_dict` (with a success + or exception) or to return requests for further retries. + """ + requests_completed = [] + requests_to_retry = [] + for ack_id, ack_request in ack_reqs_dict.items(): + # Debug logging: slow acks + if ( + req_type == "ack" + and ack_histogram + and ack_request.time_to_ack > ack_histogram.percentile(percent=99) + ): + _SLOW_ACK_LOGGER.debug( + "Message (id=%s, ack_id=%s) ack duration of %s s is higher than the p99 ack duration", + ack_request.message_id, + ack_request.ack_id, + ) + + # Handle special errors returned for ack/modack RPCs via the ErrorInfo + # sidecar metadata when exactly-once delivery is enabled. 
+ if errors_dict and ack_id in errors_dict: + exactly_once_error = errors_dict[ack_id] + if exactly_once_error.startswith("TRANSIENT_"): + requests_to_retry.append(ack_request) + else: + if exactly_once_error == "PERMANENT_FAILURE_INVALID_ACK_ID": + exc = AcknowledgeError(AcknowledgeStatus.INVALID_ACK_ID, info=None) + else: + exc = AcknowledgeError(AcknowledgeStatus.OTHER, exactly_once_error) + future = ack_request.future + if future is not None: + future.set_exception(exc) + requests_completed.append(ack_request) + # Temporary GRPC errors are retried + elif ( + error_status + and error_status.code in _EXACTLY_ONCE_DELIVERY_TEMPORARY_RETRY_ERRORS + ): + requests_to_retry.append(ack_request) + # Other GRPC errors are NOT retried + elif error_status: + if error_status.code == code_pb2.PERMISSION_DENIED: + exc = AcknowledgeError(AcknowledgeStatus.PERMISSION_DENIED, info=None) + elif error_status.code == code_pb2.FAILED_PRECONDITION: + exc = AcknowledgeError(AcknowledgeStatus.FAILED_PRECONDITION, info=None) + else: + exc = AcknowledgeError(AcknowledgeStatus.OTHER, str(error_status)) + future = ack_request.future + if future is not None: + future.set_exception(exc) + requests_completed.append(ack_request) + # Since no error occurred, requests with futures are completed successfully. + elif ack_request.future: + future = ack_request.future + # success + assert future is not None + future.set_result(AcknowledgeStatus.SUCCESS) + requests_completed.append(ack_request) + # All other requests are considered completed. + else: + requests_completed.append(ack_request) + + return requests_completed, requests_to_retry + + class StreamingPullManager(object): """The streaming pull manager coordinates pulling messages from Pub/Sub, leasing them, and scheduling them to be processed. Args: - client (~.pubsub_v1.subscriber.client): The subscriber client used - to create this instance. - subscription (str): The name of the subscription. The canonical - format for this is + client: + The subscriber client used to create this instance. + subscription: + The name of the subscription. The canonical format for this is ``projects/{project}/subscriptions/{subscription}``. - flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow - control settings. - scheduler (~google.cloud.pubsub_v1.scheduler.Scheduler): The scheduler - to use to process messages. If not provided, a thread pool-based - scheduler will be used. + flow_control: + The flow control settings. + scheduler: + The scheduler to use to process messages. If not provided, a thread + pool-based scheduler will be used. + use_legacy_flow_control: + If set to ``True``, flow control at the Cloud Pub/Sub server is disabled, + though client-side flow control is still enabled. If set to ``False`` + (default), both server-side and client-side flow control are enabled. + await_callbacks_on_shutdown: + If ``True``, the shutdown thread will wait until all scheduler threads + terminate and only then proceed with shutting down the remaining running + helper threads. + + If ``False`` (default), the shutdown thread will shut the scheduler down, + but it will not wait for the currently executing scheduler threads to + terminate. + + This setting affects when the on close callbacks get invoked, and + consequently, when the StreamingPullFuture associated with the stream gets + resolved. 
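+
+        Example (an illustrative sketch only; ``client`` is assumed to be an
+        existing ``subscriber.Client`` and ``callback`` a user-supplied
+        callable; production code normally goes through ``client.subscribe()``
+        instead of constructing the manager directly):
+
+            manager = StreamingPullManager(
+                client,
+                "projects/my-project/subscriptions/my-sub",
+                await_callbacks_on_shutdown=True,
+            )
+            manager.open(callback, on_callback_error=print)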
""" - _UNARY_REQUESTS = True - """If set to True, this class will make requests over a separate unary - RPC instead of over the streaming RPC.""" - def __init__( - self, client, subscription, flow_control=types.FlowControl(), scheduler=None + self, + client: "subscriber.Client", + subscription: str, + flow_control: types.FlowControl = types.FlowControl(), + scheduler: Optional[ThreadScheduler] = None, + use_legacy_flow_control: bool = False, + await_callbacks_on_shutdown: bool = False, ): self._client = client self._subscription = subscription + self._exactly_once_enabled = False self._flow_control = flow_control + self._use_legacy_flow_control = use_legacy_flow_control + self._await_callbacks_on_shutdown = await_callbacks_on_shutdown self._ack_histogram = histogram.Histogram() self._last_histogram_size = 0 - self._ack_deadline = 10 - self._rpc = None - self._callback = None + self._stream_metadata = [ + ["x-goog-request-params", "subscription=" + subscription] + ] + + # If max_duration_per_lease_extension is the default + # we set the stream_ack_deadline to the default of 60 + if self._flow_control.max_duration_per_lease_extension == 0: + self._stream_ack_deadline = _DEFAULT_STREAM_ACK_DEADLINE + # We will not be able to extend more than the default minimum + elif ( + self._flow_control.max_duration_per_lease_extension + < _MIN_STREAM_ACK_DEADLINE + ): + self._stream_ack_deadline = _MIN_STREAM_ACK_DEADLINE + # Will not be able to extend past the max + elif ( + self._flow_control.max_duration_per_lease_extension + > _MAX_STREAM_ACK_DEADLINE + ): + self._stream_ack_deadline = _MAX_STREAM_ACK_DEADLINE + else: + self._stream_ack_deadline = ( + self._flow_control.max_duration_per_lease_extension + ) + + self._ack_deadline = max( + min( + self._flow_control.min_duration_per_lease_extension, + histogram.MAX_ACK_DEADLINE, + ), + histogram.MIN_ACK_DEADLINE, + ) + + self._rpc: Optional[bidi.ResumableBidiRpc] = None + self._callback: Optional[functools.partial] = None self._closing = threading.Lock() self._closed = False - self._close_callbacks = [] + self._close_callbacks: List[Callable[["StreamingPullManager", Any], Any]] = [] + # Guarded by self._exactly_once_enabled_lock + self._send_new_ack_deadline = False + + # A shutdown thread is created on intentional shutdown. + self._regular_shutdown_thread: Optional[threading.Thread] = None # Generate a random client id tied to this object. All streaming pull # connections (initial and re-connects) will then use the same client @@ -124,9 +402,7 @@ def __init__( self._client_id = str(uuid.uuid4()) if scheduler is None: - self._scheduler = ( - google.cloud.pubsub_v1.subscriber.scheduler.ThreadScheduler() - ) + self._scheduler: Optional[ThreadScheduler] = ThreadScheduler() else: self._scheduler = scheduler @@ -145,15 +421,26 @@ def __init__( # currently on hold. self._pause_resume_lock = threading.Lock() + # A lock guarding the self._exactly_once_enabled variable. We may also + # acquire the self._ack_deadline_lock while this lock is held, but not + # the reverse. So, we maintain a simple ordering of these two locks to + # prevent deadlocks. + self._exactly_once_enabled_lock = threading.Lock() + + # A lock protecting the current ACK deadline used in the lease management. This + # value can be potentially updated both by the leaser thread and by the message + # consumer thread when invoking the internal _on_response() callback. + self._ack_deadline_lock = threading.Lock() + # The threads created in ``.open()``. 
- self._dispatcher = None - self._leaser = None - self._consumer = None - self._heartbeater = None + self._dispatcher: Optional[dispatcher.Dispatcher] = None + self._leaser: Optional[leaser.Leaser] = None + self._consumer: Optional[bidi.BackgroundConsumer] = None + self._heartbeater: Optional[heartbeater.Heartbeater] = None @property - def is_active(self): - """bool: True if this manager is actively streaming. + def is_active(self) -> bool: + """``True`` if this manager is actively streaming. Note that ``False`` does not indicate this is complete shut down, just that it stopped getting new messages. @@ -161,60 +448,92 @@ def is_active(self): return self._consumer is not None and self._consumer.is_active @property - def flow_control(self): - """google.cloud.pubsub_v1.types.FlowControl: The active flow control - settings.""" + def flow_control(self) -> types.FlowControl: + """The active flow control settings.""" return self._flow_control @property - def dispatcher(self): - """google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher: - The dispatcher helper. - """ + def dispatcher(self) -> Optional[dispatcher.Dispatcher]: + """The dispatcher helper.""" return self._dispatcher @property - def leaser(self): - """google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser: - The leaser helper. - """ + def leaser(self) -> Optional["leaser.Leaser"]: + """The leaser helper.""" return self._leaser @property - def ack_histogram(self): - """google.cloud.pubsub_v1.subscriber._protocol.histogram.Histogram: - The histogram tracking time-to-acknowledge. - """ + def ack_histogram(self) -> histogram.Histogram: + """The histogram tracking time-to-acknowledge.""" return self._ack_histogram @property - def ack_deadline(self): - """Return the current ack deadline based on historical time-to-ack. - - This method is "sticky". It will only perform the computations to - check on the right ack deadline if the histogram has gained a - significant amount of new information. + def ack_deadline(self) -> float: + """Return the current ACK deadline based on historical data without updating it. Returns: - int: The ack deadline. + The ack deadline. """ - target_size = min( - self._last_histogram_size * 2, self._last_histogram_size + 100 - ) - hist_size = len(self.ack_histogram) + return self._obtain_ack_deadline(maybe_update=False) + + def _obtain_ack_deadline(self, maybe_update: bool) -> float: + """The actual `ack_deadline` implementation. - if hist_size > target_size: - self._last_histogram_size = hist_size - self._ack_deadline = self.ack_histogram.percentile(percent=99) + This method is "sticky". It will only perform the computations to check on the + right ACK deadline if explicitly requested AND if the histogram with past + time-to-ack data has gained a significant amount of new information. + + Args: + maybe_update: + If ``True``, also update the current ACK deadline before returning it if + enough new ACK data has been gathered. - if self.flow_control.max_duration_per_lease_extension > 0: - self._ack_deadline = min( - self._ack_deadline, self.flow_control.max_duration_per_lease_extension + Returns: + The current ACK deadline in seconds to use. 
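+
+        For example (illustrative numbers): if the histogram's p99 time-to-ack
+        is 17 seconds but ``min_duration_per_lease_extension`` is 60, the
+        deadline is raised to 60; with exactly-once delivery enabled and no
+        explicit minimum set, the same p99 of 17 is likewise raised to the
+        60-second floor.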
+ """ + with self._ack_deadline_lock: + if not maybe_update: + return self._ack_deadline + + target_size = min( + self._last_histogram_size * 2, self._last_histogram_size + 100 ) - return self._ack_deadline + hist_size = len(self.ack_histogram) + + if hist_size > target_size: + self._last_histogram_size = hist_size + self._ack_deadline = self.ack_histogram.percentile(percent=99) + + if self.flow_control.max_duration_per_lease_extension > 0: + # The setting in flow control could be too low, adjust if needed. + flow_control_setting = max( + self.flow_control.max_duration_per_lease_extension, + histogram.MIN_ACK_DEADLINE, + ) + self._ack_deadline = min(self._ack_deadline, flow_control_setting) + + # If the user explicitly sets a min ack_deadline, respect it. + if self.flow_control.min_duration_per_lease_extension > 0: + # The setting in flow control could be too high, adjust if needed. + flow_control_setting = min( + self.flow_control.min_duration_per_lease_extension, + histogram.MAX_ACK_DEADLINE, + ) + self._ack_deadline = max(self._ack_deadline, flow_control_setting) + elif self._exactly_once_enabled: + # Higher minimum ack_deadline for subscriptions with + # exactly-once delivery enabled. + self._ack_deadline = max( + self._ack_deadline, _MIN_ACK_DEADLINE_SECS_WHEN_EXACTLY_ONCE_ENABLED + ) + # If we have updated the ack_deadline and it is longer than the stream_ack_deadline + # set the stream_ack_deadline to the new ack_deadline. + if self._ack_deadline > self._stream_ack_deadline: + self._stream_ack_deadline = self._ack_deadline + return self._ack_deadline @property - def load(self): + def load(self) -> float: """Return the current load. The load is represented as a float, where 1.0 represents having @@ -228,7 +547,7 @@ def load(self): running room on setting A if setting B is over.) Returns: - float: The load value. + The load value. """ if self._leaser is None: return 0.0 @@ -248,15 +567,17 @@ def load(self): ] ) - def add_close_callback(self, callback): + def add_close_callback( + self, callback: Callable[["StreamingPullManager", Any], Any] + ) -> None: """Schedules a callable when the manager closes. Args: - callback (Callable): The method to call. + The method to call. """ self._close_callbacks.append(callback) - def activate_ordering_keys(self, ordering_keys): + def activate_ordering_keys(self, ordering_keys: Iterable[str]) -> None: """Send the next message in the queue for each of the passed-in ordering keys, if they exist. Clean up state for keys that no longer have any queued messages. @@ -267,25 +588,30 @@ def activate_ordering_keys(self, ordering_keys): This decision is by design because it simplifies MessagesOnHold. Args: - ordering_keys(Sequence[str]): A sequence of ordering keys to - activate. May be empty. + ordering_keys: + A sequence of ordering keys to activate. May be empty. """ with self._pause_resume_lock: + if self._scheduler is None: + return # We are shutting down, don't try to dispatch any more messages. 
+
+            self._messages_on_hold.activate_ordering_keys(
                 ordering_keys, self._schedule_message_on_hold
             )
 
-    def maybe_pause_consumer(self):
+    def maybe_pause_consumer(self) -> None:
         """Check the current load and pause the consumer if needed."""
         with self._pause_resume_lock:
             if self.load >= _MAX_LOAD:
                 if self._consumer is not None and not self._consumer.is_paused:
-                    _LOGGER.debug(
-                        "Message backlog over load at %.2f, pausing.", self.load
+                    _FLOW_CONTROL_LOGGER.debug(
+                        "Message backlog over load at %.2f (threshold %.2f), initiating client-side flow control",
+                        self.load,
+                        _MAX_LOAD,
                     )
                     self._consumer.pause()
 
-    def maybe_resume_consumer(self):
+    def maybe_resume_consumer(self) -> None:
         """Check the load and held messages and resume the consumer if needed.
 
         If there are messages held internally, release those messages before
@@ -308,12 +634,20 @@ def maybe_resume_consumer(self):
             self._maybe_release_messages()
 
         if self.load < _RESUME_THRESHOLD:
-            _LOGGER.debug("Current load is %.2f, resuming consumer.", self.load)
+            _FLOW_CONTROL_LOGGER.debug(
+                "Current load is %.2f (threshold %.2f), suspending client-side flow control.",
+                self.load,
+                _RESUME_THRESHOLD,
+            )
             self._consumer.resume()
         else:
-            _LOGGER.debug("Did not resume, current load is %.2f.", self.load)
+            _FLOW_CONTROL_LOGGER.debug(
+                "Current load is %.2f (threshold %.2f), retaining client-side flow control.",
+                self.load,
+                _RESUME_THRESHOLD,
+            )
 
-    def _maybe_release_messages(self):
+    def _maybe_release_messages(self) -> None:
         """Release (some of) the held messages if the current load allows for it.
 
         The method tries to release as many messages as the current leaser load
@@ -330,20 +664,23 @@ def _maybe_release_messages(self):
             msg = self._messages_on_hold.get()
             if not msg:
                 break
-
+            if msg.opentelemetry_data:
+                msg.opentelemetry_data.end_subscribe_scheduler_span()
             self._schedule_message_on_hold(msg)
             released_ack_ids.append(msg.ack_id)
+
+        assert self._leaser is not None
         self._leaser.start_lease_expiry_timer(released_ack_ids)
 
-    def _schedule_message_on_hold(self, msg):
-        """Schedule a message on hold to be sent to the user and change
-        on-hold-bytes.
+    def _schedule_message_on_hold(
+        self, msg: "google.cloud.pubsub_v1.subscriber.message.Message"
+    ):
+        """Schedule a message on hold to be sent to the user and change on-hold-bytes.
 
         The method assumes the caller has acquired the ``_pause_resume_lock``.
 
         Args:
-            msg (google.cloud.pubsub_v1.message.Message): The message to
-                schedule to be sent to the user.
+            msg: The message to schedule to be sent to the user.
         """
         assert msg, "Message must not be None."
@@ -362,84 +699,220 @@ def _schedule_message_on_hold(self, msg):
             self._messages_on_hold.size,
             self._on_hold_bytes,
         )
+        assert self._scheduler is not None
+        assert self._callback is not None
+        if msg.opentelemetry_data:
+            msg.opentelemetry_data.start_subscribe_concurrency_control_span()
         self._scheduler.schedule(self._callback, msg)
 
-    def _send_unary_request(self, request):
-        """Send a request using a separate unary request instead of over the
-        stream.
+    def send_unary_ack(
+        self, ack_ids, ack_reqs_dict
+    ) -> Tuple[List[requests.AckRequest], List[requests.AckRequest]]:
+        """Send a request using a separate unary request instead of over the stream.
 
-        Args:
-            request (types.StreamingPullRequest): The stream request to be
-                mapped into unary requests.
+        If a RetryError occurs, the manager shutdown is triggered, and the
+        error is re-raised.
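+
+        A sketch of the expected arguments (illustrative values only):
+
+            ack_reqs_dict = {
+                "ack-1": requests.AckRequest(
+                    ack_id="ack-1",
+                    byte_size=1024,
+                    time_to_ack=1.5,
+                    ordering_key=None,
+                    future=None,
+                ),
+            }
+            completed, to_retry = manager.send_unary_ack(["ack-1"], ack_reqs_dict)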
""" - if request.ack_ids: - self._client.acknowledge( - subscription=self._subscription, ack_ids=list(request.ack_ids) + assert ack_ids + assert len(ack_ids) == len(ack_reqs_dict) + + error_status = None + ack_errors_dict = None + try: + self._client.acknowledge(subscription=self._subscription, ack_ids=ack_ids) + except exceptions.GoogleAPICallError as exc: + _LOGGER.debug( + "Exception while sending unary RPC. This is typically " + "non-fatal as stream requests are best-effort.", + exc_info=True, ) + error_status = _get_status(exc) + ack_errors_dict = _get_ack_errors(exc) + except exceptions.RetryError as exc: + exactly_once_delivery_enabled = self._exactly_once_delivery_enabled() + # Makes sure to complete futures so they don't block forever. + for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + if exactly_once_delivery_enabled: + e = AcknowledgeError( + AcknowledgeStatus.OTHER, "RetryError while sending ack RPC." + ) + req.future.set_exception(e) + else: + req.future.set_result(AcknowledgeStatus.SUCCESS) - if request.modify_deadline_ack_ids: - # Send ack_ids with the same deadline seconds together. - deadline_to_ack_ids = collections.defaultdict(list) - - for n, ack_id in enumerate(request.modify_deadline_ack_ids): - deadline = request.modify_deadline_seconds[n] - deadline_to_ack_ids[deadline].append(ack_id) + _LOGGER.debug( + "RetryError while sending ack RPC. Waiting on a transient " + "error resolution for too long, will now trigger shutdown.", + exc_info=False, + ) + # The underlying channel has been suffering from a retryable error + # for too long, time to give up and shut the streaming pull down. + self._on_rpc_done(exc) + raise + + if self._exactly_once_delivery_enabled(): + requests_completed, requests_to_retry = _process_requests( + error_status, ack_reqs_dict, ack_errors_dict, self.ack_histogram, "ack" + ) + else: + requests_completed = [] + requests_to_retry = [] + # When exactly-once delivery is NOT enabled, acks/modacks are considered + # best-effort. So, they always succeed even if the RPC fails. + for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + req.future.set_result(AcknowledgeStatus.SUCCESS) + requests_completed.append(req) + + return requests_completed, requests_to_retry + + def send_unary_modack( + self, + modify_deadline_ack_ids, + modify_deadline_seconds, + ack_reqs_dict, + default_deadline=None, + ) -> Tuple[List[requests.ModAckRequest], List[requests.ModAckRequest]]: + """Send a request using a separate unary request instead of over the stream. - for deadline, ack_ids in six.iteritems(deadline_to_ack_ids): + If a RetryError occurs, the manager shutdown is triggered, and the + error is re-raised. + """ + assert modify_deadline_ack_ids + # Either we have a generator or a single deadline. + assert modify_deadline_seconds is None or default_deadline is None + + error_status = None + modack_errors_dict = None + try: + if default_deadline is None: + # Send ack_ids with the same deadline seconds together. 
+ deadline_to_ack_ids = collections.defaultdict(list) + + for n, ack_id in enumerate(modify_deadline_ack_ids): + deadline = modify_deadline_seconds[n] + deadline_to_ack_ids[deadline].append(ack_id) + + for deadline, ack_ids in deadline_to_ack_ids.items(): + self._client.modify_ack_deadline( + subscription=self._subscription, + ack_ids=ack_ids, + ack_deadline_seconds=deadline, + ) + else: + # We can send all requests with the default deadline. self._client.modify_ack_deadline( subscription=self._subscription, - ack_ids=ack_ids, - ack_deadline_seconds=deadline, + ack_ids=modify_deadline_ack_ids, + ack_deadline_seconds=default_deadline, ) + except exceptions.GoogleAPICallError as exc: + _LOGGER.debug( + "Exception while sending unary RPC. This is typically " + "non-fatal as stream requests are best-effort.", + exc_info=True, + ) + error_status = _get_status(exc) + modack_errors_dict = _get_ack_errors(exc) + except exceptions.RetryError as exc: + exactly_once_delivery_enabled = self._exactly_once_delivery_enabled() + # Makes sure to complete futures so they don't block forever. + for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + if exactly_once_delivery_enabled: + e = AcknowledgeError( + AcknowledgeStatus.OTHER, + "RetryError while sending modack RPC.", + ) + req.future.set_exception(e) + else: + req.future.set_result(AcknowledgeStatus.SUCCESS) - _LOGGER.debug("Sent request(s) over unary RPC.") - - def send(self, request): - """Queue a request to be sent to the RPC. + _LOGGER.debug( + "RetryError while sending modack RPC. Waiting on a transient " + "error resolution for too long, will now trigger shutdown.", + exc_info=False, + ) + # The underlying channel has been suffering from a retryable error + # for too long, time to give up and shut the streaming pull down. + self._on_rpc_done(exc) + raise + + if self._exactly_once_delivery_enabled(): + requests_completed, requests_to_retry = _process_requests( + error_status, + ack_reqs_dict, + modack_errors_dict, + self.ack_histogram, + "modack", + ) + else: + requests_completed = [] + requests_to_retry = [] + # When exactly-once delivery is NOT enabled, acks/modacks are considered + # best-effort. So, they always succeed even if the RPC fails. + for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + req.future.set_result(AcknowledgeStatus.SUCCESS) + requests_completed.append(req) + + return requests_completed, requests_to_retry + + def heartbeat(self) -> bool: + """Sends a heartbeat request over the streaming pull RPC. + + The request is empty by default, but may contain the current ack_deadline + if the self._exactly_once_enabled flag has changed. - If a RetryError occurs, the manager shutdown is triggered, and the - error is re-raised. + Returns: + If a heartbeat request has actually been sent. """ - if self._UNARY_REQUESTS: - try: - self._send_unary_request(request) - except exceptions.GoogleAPICallError: - _LOGGER.debug( - "Exception while sending unary RPC. 
This is typically " - "non-fatal as stream requests are best-effort.", - exc_info=True, + if self._rpc is not None and self._rpc.is_active: + send_new_ack_deadline = False + with self._exactly_once_enabled_lock: + send_new_ack_deadline = self._send_new_ack_deadline + self._send_new_ack_deadline = False + + if send_new_ack_deadline: + request = gapic_types.StreamingPullRequest( + stream_ack_deadline_seconds=self._stream_ack_deadline ) - except exceptions.RetryError as exc: _LOGGER.debug( - "RetryError while sending unary RPC. Waiting on a transient " - "error resolution for too long, will now trigger shutdown.", - exc_info=False, + "Sending new ack_deadline of %d seconds.", self._stream_ack_deadline ) - # The underlying channel has been suffering from a retryable error - # for too long, time to give up and shut the streaming pull down. - self._on_rpc_done(exc) - raise + else: + request = gapic_types.StreamingPullRequest() - else: self._rpc.send(request) + return True - def heartbeat(self): - """Sends an empty request over the streaming pull RPC. - - This always sends over the stream, regardless of if - ``self._UNARY_REQUESTS`` is set or not. - """ - if self._rpc is not None and self._rpc.is_active: - self._rpc.send(types.StreamingPullRequest()) + return False - def open(self, callback, on_callback_error): + def open( + self, + callback: Callable[["google.cloud.pubsub_v1.subscriber.message.Message"], Any], + on_callback_error: Callable[[Exception], Any], + ) -> None: """Begin consuming messages. Args: - callback (Callable[None, google.cloud.pubsub_v1.message.Message]): + callback: A callback that will be called for each message received on the stream. - on_callback_error (Callable[Exception]): + on_callback_error: A callable that will be called if an exception is raised in the provided `callback`. """ @@ -454,29 +927,43 @@ def open(self, callback, on_callback_error): ) # Create the RPC - stream_ack_deadline_seconds = self.ack_histogram.percentile(99) + stream_ack_deadline_seconds = self._stream_ack_deadline get_initial_request = functools.partial( self._get_initial_request, stream_ack_deadline_seconds ) self._rpc = bidi.ResumableBidiRpc( - start_rpc=self._client.api.streaming_pull, + start_rpc=self._client.streaming_pull, initial_request=get_initial_request, should_recover=self._should_recover, should_terminate=self._should_terminate, + metadata=self._stream_metadata, throttle_reopen=True, ) self._rpc.add_done_callback(self._on_rpc_done) _LOGGER.debug( "Creating a stream, default ACK deadline set to {} seconds.".format( - stream_ack_deadline_seconds + self._stream_ack_deadline ) ) # Create references to threads - self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) - self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) + assert self._scheduler is not None + scheduler_queue = self._scheduler.queue + self._dispatcher = dispatcher.Dispatcher(self, scheduler_queue) + + # `on_fatal_exception` is only available in more recent library versions. + # For backwards compatibility reasons, we only pass it when `google-api-core` supports it. 
+ if _SHOULD_USE_ON_FATAL_ERROR_CALLBACK: + self._consumer = bidi.BackgroundConsumer( + self._rpc, + self._on_response, + on_fatal_exception=self._on_fatal_exception, + ) + else: + self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) + self._leaser = leaser.Leaser(self) self._heartbeater = heartbeater.Heartbeater(self) @@ -492,16 +979,36 @@ def open(self, callback, on_callback_error): # Start the stream heartbeater thread. self._heartbeater.start() - def close(self, reason=None): + def close(self, reason: Any = None) -> None: """Stop consuming messages and shutdown all helper threads. This method is idempotent. Additional calls will have no effect. + The method does not block, it delegates the shutdown operations to a background + thread. + Args: - reason (Any): The reason to close this. If None, this is considered + reason: + The reason to close this. If ``None``, this is considered an "intentional" shutdown. This is passed to the callbacks specified via :meth:`add_close_callback`. """ + self._regular_shutdown_thread = threading.Thread( + name=_REGULAR_SHUTDOWN_THREAD_NAME, + daemon=True, + target=self._shutdown, + kwargs={"reason": reason}, + ) + self._regular_shutdown_thread.start() + + def _shutdown(self, reason: Any = None) -> None: + """Run the actual shutdown sequence (stop the stream and all helper threads). + + Args: + reason: + The reason to close the stream. If ``None``, this is considered + an "intentional" shutdown. + """ with self._closing: if self._closed: return @@ -509,12 +1016,16 @@ def close(self, reason=None): # Stop consuming messages. if self.is_active: _LOGGER.debug("Stopping consumer.") + assert self._consumer is not None self._consumer.stop() self._consumer = None # Shutdown all helper threads _LOGGER.debug("Stopping scheduler.") - self._scheduler.shutdown() + assert self._scheduler is not None + dropped_messages = self._scheduler.shutdown( + await_msg_callbacks=self._await_callbacks_on_shutdown + ) self._scheduler = None # Leaser and dispatcher reference each other through the shared @@ -527,13 +1038,28 @@ def close(self, reason=None): # for the manager's maybe_resume_consumer() / maybe_pause_consumer(), # because the consumer gets shut down first. _LOGGER.debug("Stopping leaser.") + assert self._leaser is not None self._leaser.stop() + + total = len(dropped_messages) + len( + self._messages_on_hold._messages_on_hold + ) + _LOGGER.debug(f"NACK-ing all not-yet-dispatched messages (total: {total}).") + messages_to_nack = itertools.chain( + dropped_messages, self._messages_on_hold._messages_on_hold + ) + for msg in messages_to_nack: + msg.nack() + _LOGGER.debug("Stopping dispatcher.") + assert self._dispatcher is not None self._dispatcher.stop() self._dispatcher = None # dispatcher terminated, OK to dispose the leaser reference now self._leaser = None + _LOGGER.debug("Stopping heartbeater.") + assert self._heartbeater is not None self._heartbeater.stop() self._heartbeater = None @@ -544,45 +1070,167 @@ def close(self, reason=None): for callback in self._close_callbacks: callback(self, reason) - def _get_initial_request(self, stream_ack_deadline_seconds): + def _get_initial_request( + self, stream_ack_deadline_seconds: int + ) -> gapic_types.StreamingPullRequest: """Return the initial request for the RPC. This defines the initial request that must always be sent to Pub/Sub immediately upon opening the subscription. Args: - stream_ack_deadline_seconds (int): + stream_ack_deadline_seconds: The default message acknowledge deadline for the stream. 
Returns: - google.cloud.pubsub_v1.types.StreamingPullRequest: A request - suitable for being the first request on the stream (and not + A request suitable for being the first request on the stream (and not suitable for any other purpose). """ - # Any ack IDs that are under lease management need to have their - # deadline extended immediately. - if self._leaser is not None: - # Explicitly copy the list, as it could be modified by another - # thread. - lease_ids = list(self._leaser.ack_ids) - else: - lease_ids = [] - # Put the request together. - request = types.StreamingPullRequest( - modify_deadline_ack_ids=list(lease_ids), - modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), + # We need to set streaming ack deadline, but it's not useful since we'll modack to send receipt + # anyway. Set to some big-ish value in case we modack late. + request = gapic_types.StreamingPullRequest( stream_ack_deadline_seconds=stream_ack_deadline_seconds, + modify_deadline_ack_ids=[], + modify_deadline_seconds=[], subscription=self._subscription, client_id=self._client_id, - max_outstanding_messages=self._flow_control.max_messages, - max_outstanding_bytes=self._flow_control.max_bytes, + max_outstanding_messages=( + 0 if self._use_legacy_flow_control else self._flow_control.max_messages + ), + max_outstanding_bytes=( + 0 if self._use_legacy_flow_control else self._flow_control.max_bytes + ), ) # Return the initial request. return request - def _on_response(self, response): + def _send_lease_modacks( + self, + ack_ids: Iterable[str], + ack_deadline: float, + opentelemetry_data: List[SubscribeOpenTelemetry], + warn_on_invalid=True, + receipt_modack: bool = False, + ) -> Set[str]: + exactly_once_enabled = False + + modack_span: Optional[trace.Span] = None + if self._client.open_telemetry_enabled: + subscribe_span_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + subscription_split: List[str] = self._subscription.split("/") + assert len(subscription_split) == 4 + subscription_id: str = subscription_split[3] + project_id: str = subscription_split[1] + for data in opentelemetry_data: + subscribe_span: Optional[trace.Span] = data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_span_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + modack_span = start_modack_span( + subscribe_span_links, + subscription_id, + len(opentelemetry_data), + ack_deadline, + project_id, + "_send_lease_modacks", + receipt_modack, + ) + if ( + modack_span and modack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + modack_span_context: trace.SpanContext = modack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=modack_span_context, + attributes={ + "messaging.operation.name": "modack", + }, + ) + + with self._exactly_once_enabled_lock: + exactly_once_enabled = self._exactly_once_enabled + if exactly_once_enabled: + eod_items: List[requests.ModAckRequest] = [] + if self._client.open_telemetry_enabled: + for ack_id, data in zip( + ack_ids, opentelemetry_data + ): # pragma: NO COVER # Identical code covered in the same function below + assert data is not None + eod_items.append( + requests.ModAckRequest( + ack_id, + ack_deadline, + futures.Future(), + data, + ) + ) + else: + eod_items = [ + requests.ModAckRequest(ack_id, ack_deadline, futures.Future()) + for ack_id in ack_ids + ] + + assert self._dispatcher is not None + 
self._dispatcher.modify_ack_deadline(eod_items, ack_deadline) + if ( + modack_span + ): # pragma: NO COVER # Identical code covered in the same function below + modack_span.end() + expired_ack_ids = set() + for req in eod_items: + try: + assert req.future is not None + req.future.result() + except AcknowledgeError as ack_error: + if ( + ack_error.error_code != AcknowledgeStatus.INVALID_ACK_ID + or warn_on_invalid + ): + _LOGGER.warning( + "AcknowledgeError when lease-modacking a message.", + exc_info=True, + ) + if ack_error.error_code == AcknowledgeStatus.INVALID_ACK_ID: + expired_ack_ids.add(req.ack_id) + return expired_ack_ids + else: + items: List[requests.ModAckRequest] = [] + if self._client.open_telemetry_enabled: + for ack_id, data in zip(ack_ids, opentelemetry_data): + assert data is not None + items.append( + requests.ModAckRequest( + ack_id, + self.ack_deadline, + None, + data, + ) + ) + else: + items = [ + requests.ModAckRequest(ack_id, self.ack_deadline, None) + for ack_id in ack_ids + ] + assert self._dispatcher is not None + self._dispatcher.modify_ack_deadline(items, ack_deadline) + if modack_span: + modack_span.end() + return set() + + def _exactly_once_delivery_enabled(self) -> bool: + """Whether exactly-once delivery is enabled for the subscription.""" + with self._exactly_once_enabled_lock: + return self._exactly_once_enabled + + def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: """Process all received Pub/Sub messages. For each message, send a modified acknowledgment request to the @@ -601,44 +1249,108 @@ def _on_response(self, response): ) return + # IMPORTANT: Circumvent the wrapper class and operate on the raw underlying + # protobuf message to significantly gain on attribute access performance. + received_messages = response._pb.received_messages + + subscribe_opentelemetry: List[SubscribeOpenTelemetry] = [] + if self._client.open_telemetry_enabled: + for received_message in received_messages: + opentelemetry_data = SubscribeOpenTelemetry(received_message.message) + opentelemetry_data.start_subscribe_span( + self._subscription, + response.subscription_properties.exactly_once_delivery_enabled, + received_message.ack_id, + received_message.delivery_attempt, + ) + subscribe_opentelemetry.append(opentelemetry_data) + _LOGGER.debug( "Processing %s received message(s), currently on hold %s (bytes %s).", - len(response.received_messages), + len(received_messages), self._messages_on_hold.size, self._on_hold_bytes, ) + with self._exactly_once_enabled_lock: + if ( + response.subscription_properties.exactly_once_delivery_enabled + != self._exactly_once_enabled + ): + self._exactly_once_enabled = ( + response.subscription_properties.exactly_once_delivery_enabled + ) + # Update ack_deadline, whose minimum depends on self._exactly_once_enabled + # This method acquires the self._ack_deadline_lock lock. + self._obtain_ack_deadline(maybe_update=True) + self._send_new_ack_deadline = True + # Immediately (i.e. without waiting for the auto lease management) # modack the messages we received, as this tells the server that we've # received them. 
- items = [ - requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) - for message in response.received_messages - ] - self._dispatcher.modify_ack_deadline(items) + ack_id_gen = (message.ack_id for message in received_messages) + expired_ack_ids = self._send_lease_modacks( + ack_id_gen, + self.ack_deadline, + subscribe_opentelemetry, + warn_on_invalid=False, + receipt_modack=True, + ) + + if len(expired_ack_ids): + _EXPIRY_LOGGER.debug( + "ack ids %s were dropped as they have already expired.", expired_ack_ids + ) with self._pause_resume_lock: - for received_message in response.received_messages: - message = google.cloud.pubsub_v1.subscriber.message.Message( - received_message.message, - received_message.ack_id, - received_message.delivery_attempt, - self._scheduler.queue, - ) - self._messages_on_hold.put(message) - self._on_hold_bytes += message.size - req = requests.LeaseRequest( - ack_id=message.ack_id, - byte_size=message.size, - ordering_key=message.ordering_key, + if self._scheduler is None or self._leaser is None: + _LOGGER.debug( + f"self._scheduler={self._scheduler} or self._leaser={self._leaser} is None. Stopping further processing." ) - self.leaser.add([req]) + return + + i: int = 0 + for received_message in received_messages: + if ( + not self._exactly_once_delivery_enabled() + or received_message.ack_id not in expired_ack_ids + ): + message = google.cloud.pubsub_v1.subscriber.message.Message( + received_message.message, + received_message.ack_id, + received_message.delivery_attempt, + self._scheduler.queue, + self._exactly_once_delivery_enabled, + ) + if self._client.open_telemetry_enabled: + message.opentelemetry_data = subscribe_opentelemetry[i] + i = i + 1 + self._messages_on_hold.put(message) + self._on_hold_bytes += message.size + req = requests.LeaseRequest( + ack_id=message.ack_id, + byte_size=message.size, + ordering_key=message.ordering_key, + opentelemetry_data=message.opentelemetry_data, + ) + self._leaser.add([req]) self._maybe_release_messages() self.maybe_pause_consumer() - def _should_recover(self, exception): + def _on_fatal_exception(self, exception: BaseException) -> None: + """ + Called whenever `self.consumer` receives a non-retryable exception. + We close the manager on such non-retryable cases. + """ + _LOGGER.info( + "Streaming pull terminating after receiving non-recoverable error: %s", + exception, + ) + self.close(exception) + + def _should_recover(self, exception: BaseException) -> bool: """Determine if an error on the RPC stream should be recovered. If the exception is one of the retryable exceptions, this will signal @@ -647,20 +1359,24 @@ def _should_recover(self, exception): This will cause the stream to exit when it returns :data:`False`. Returns: - bool: Indicates if the caller should recover or shut down. + Indicates if the caller should recover or shut down. Will be :data:`True` if the ``exception`` is "acceptable", i.e. in a list of retryable / idempotent exceptions. """ - exception = _maybe_wrap_exception(exception) + exception = _wrap_as_exception(exception) # If this is in the list of idempotent exceptions, then we want to # recover. 
if isinstance(exception, _RETRYABLE_STREAM_ERRORS): - _LOGGER.info("Observed recoverable stream error %s", exception) + _STREAMS_LOGGER.debug( + "Observed recoverable stream error %s, reopening stream", exception + ) return True - _LOGGER.info("Observed non-recoverable stream error %s", exception) + _STREAMS_LOGGER.debug( + "Observed non-recoverable stream error %s, shutting down stream", exception + ) return False - def _should_terminate(self, exception): + def _should_terminate(self, exception: BaseException) -> bool: """Determine if an error on the RPC stream should be terminated. If the exception is one of the terminating exceptions, this will signal @@ -669,18 +1385,24 @@ def _should_terminate(self, exception): This will cause the stream to exit when it returns :data:`True`. Returns: - bool: Indicates if the caller should terminate or attempt recovery. + Indicates if the caller should terminate or attempt recovery. Will be :data:`True` if the ``exception`` is "acceptable", i.e. in a list of terminating exceptions. """ - exception = _maybe_wrap_exception(exception) - if isinstance(exception, _TERMINATING_STREAM_ERRORS): - _LOGGER.info("Observed terminating stream error %s", exception) + exception = _wrap_as_exception(exception) + is_api_error = isinstance(exception, exceptions.GoogleAPICallError) + # Terminate any non-API errors, or non-retryable errors (permission denied, unauthorized, etc.) + if not is_api_error or isinstance(exception, _TERMINATING_STREAM_ERRORS): + _STREAMS_LOGGER.debug( + "Observed terminating stream error %s, shutting down stream", exception + ) return True - _LOGGER.info("Observed non-terminating stream error %s", exception) + _STREAMS_LOGGER.debug( + "Observed non-terminating stream error %s, attempting to reopen", exception + ) return False - def _on_rpc_done(self, future): + def _on_rpc_done(self, future: Any) -> None: """Triggered whenever the underlying RPC terminates without recovery. This is typically triggered from one of two threads: the background @@ -691,10 +1413,10 @@ def _on_rpc_done(self, future): with shutting everything down. This is to prevent blocking in the background consumer and preventing it from being ``joined()``. 
""" - _LOGGER.info("RPC termination has signaled streaming pull manager shutdown.") - future = _maybe_wrap_exception(future) + _LOGGER.debug("RPC termination has signaled streaming pull manager shutdown.") + error = _wrap_as_exception(future) thread = threading.Thread( - name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future} + name=_RPC_ERROR_THREAD_NAME, target=self._shutdown, kwargs={"reason": error} ) thread.daemon = True thread.start() diff --git a/google/cloud/pubsub_v1/subscriber/client.py b/google/cloud/pubsub_v1/subscriber/client.py index 00c8f2498..41277e5e1 100644 --- a/google/cloud/pubsub_v1/subscriber/client.py +++ b/google/cloud/pubsub_v1/subscriber/client.py @@ -14,34 +14,31 @@ from __future__ import absolute_import +import sys import os -import pkg_resources -import six +import typing +from typing import cast, Any, Callable, Optional, Sequence, Union +import warnings -import grpc +from google.auth.credentials import AnonymousCredentials # type: ignore +from google.oauth2 import service_account # type: ignore -from google.api_core import grpc_helpers -from google.oauth2 import service_account - -from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import subscriber_client -from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.pubsub_v1.services.subscriber import client as subscriber_client +from google.pubsub_v1 import gapic_version as package_version +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1 import subscriber + from google.pubsub_v1.services.subscriber.transports.grpc import ( + SubscriberGrpcTransport, + ) -__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version - -_BLACKLISTED_METHODS = ( - "publish", - "from_service_account_file", - "from_service_account_json", -) +__version__ = package_version.__version__ -@_gapic.add_methods(subscriber_client.SubscriberClient, blacklist=_BLACKLISTED_METHODS) -class Client(object): +class Client(subscriber_client.SubscriberClient): """A subscriber client for Google Cloud Pub/Sub. This creates an object that is capable of subscribing to messages. @@ -49,7 +46,7 @@ class Client(object): get sensible defaults. Args: - kwargs (dict): Any additional arguments provided are sent as keyword + kwargs: Any additional arguments provided are sent as keyword keyword arguments to the underlying :class:`~google.cloud.pubsub_v1.gapic.subscriber_client.SubscriberClient`. Generally you should not need to set additional keyword @@ -71,61 +68,69 @@ class Client(object): ) """ - def __init__(self, **kwargs): + def __init__( + self, + subscriber_options: Union[types.SubscriberOptions, Sequence] = (), + **kwargs: Any + ): + assert ( + isinstance(subscriber_options, types.SubscriberOptions) + or len(subscriber_options) == 0 + ), "subscriber_options must be of type SubscriberOptions or an empty sequence." + # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. + # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator + # code below to test files. 
if os.environ.get("PUBSUB_EMULATOR_HOST"): - kwargs["channel"] = grpc.insecure_channel( - target=os.environ.get("PUBSUB_EMULATOR_HOST") - ) + kwargs["client_options"] = { + "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST") + } + # Configure credentials directly to transport, if provided. + if "transport" not in kwargs: + kwargs["credentials"] = AnonymousCredentials() - # api_endpoint wont be applied if 'transport' is passed in. - client_options = kwargs.pop("client_options", None) + # Instantiate the underlying GAPIC client. + super().__init__(**kwargs) + self._target = self._transport._host + self._closed = False + + self.subscriber_options = types.SubscriberOptions(*subscriber_options) + + # Set / override Open Telemetry option. + self._open_telemetry_enabled = ( + self.subscriber_options.enable_open_telemetry_tracing + ) + # OpenTelemetry features used by the library are not supported in Python versions <= 3.7. + # Refer https://github.com/open-telemetry/opentelemetry-python/issues/3993#issuecomment-2211976389 if ( - client_options - and "api_endpoint" in client_options - and isinstance(client_options["api_endpoint"], six.string_types) + self.subscriber_options.enable_open_telemetry_tracing + and sys.version_info.major == 3 + and sys.version_info.minor < 8 ): - self._target = client_options["api_endpoint"] - else: - self._target = subscriber_client.SubscriberClient.SERVICE_ADDRESS - - # Use a custom channel. - # We need this in order to set appropriate default message size and - # keepalive options. - if "transport" not in kwargs: - channel = kwargs.pop("channel", None) - if channel is None: - channel = grpc_helpers.create_channel( - credentials=kwargs.pop("credentials", None), - target=self.target, - scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - "grpc.keepalive_time_ms": 30000, - }.items(), - ) - # cannot pass both 'channel' and 'credentials' - kwargs.pop("credentials", None) - transport = subscriber_grpc_transport.SubscriberGrpcTransport( - channel=channel + warnings.warn( + message="Open Telemetry for Python version 3.7 or lower is not supported. Disabling Open Telemetry tracing.", + category=RuntimeWarning, ) - kwargs["transport"] = transport + self._open_telemetry_enabled = False - # Add the metrics headers, and instantiate the underlying GAPIC - # client. - self._api = subscriber_client.SubscriberClient(**kwargs) + @property + def open_telemetry_enabled(self) -> bool: + """ + Returns True if Open Telemetry is enabled. False otherwise. + """ + return self._open_telemetry_enabled # pragma: NO COVER @classmethod - def from_service_account_file(cls, filename, **kwargs): + def from_service_account_file( # type: ignore[override] + cls, filename: str, **kwargs: Any + ) -> "Client": """Creates an instance of this client using the provided credentials file. Args: - filename (str): The path to the service account private key json - file. + filename: The path to the service account private key json file. kwargs: Additional arguments to pass to the constructor. Returns: @@ -136,23 +141,54 @@ def from_service_account_file(cls, filename, **kwargs): kwargs["credentials"] = credentials return cls(**kwargs) - from_service_account_json = from_service_account_file + from_service_account_json = from_service_account_file # type: ignore[assignment] @property - def target(self): + def target(self) -> str: """Return the target (where the API is). Returns: - str: The location of the API. 
+ The location of the API. """ return self._target + @property + def closed(self) -> bool: + """Return whether the client has been closed and cannot be used anymore. + + .. versionadded:: 2.8.0 + """ + return self._closed + @property def api(self): - """The underlying gapic API client.""" - return self._api + """The underlying gapic API client. - def subscribe(self, subscription, callback, flow_control=(), scheduler=None): + .. versionchanged:: 2.10.0 + Instead of a GAPIC ``SubscriberClient`` client instance, this property is a + proxy object to it with the same interface. + + .. deprecated:: 2.10.0 + Use the GAPIC methods and properties on the client instance directly + instead of through the :attr:`api` attribute. + """ + msg = ( + 'The "api" property only exists for backward compatibility, access its ' + 'attributes directly thorugh the client instance (e.g. "client.foo" ' + 'instead of "client.api.foo").' + ) + warnings.warn(msg, category=DeprecationWarning) + return super() + + def subscribe( + self, + subscription: str, + callback: Callable[["subscriber.message.Message"], Any], + flow_control: Union[types.FlowControl, Sequence] = (), + scheduler: Optional["subscriber.scheduler.ThreadScheduler"] = None, + use_legacy_flow_control: bool = False, + await_callbacks_on_shutdown: bool = False, + ) -> futures.StreamingPullFuture: """Asynchronously start receiving messages on a given subscription. This method starts a background thread to begin pulling messages from @@ -174,6 +210,10 @@ def subscribe(self, subscription, callback, flow_control=(), scheduler=None): settings may lead to faster throughput for messages that do not take a long time to process. + The ``use_legacy_flow_control`` argument disables enforcing flow control + settings at the Cloud Pub/Sub server, and only the client side flow control + will be enforced. + This method starts the receiver in the background and returns a *Future* representing its execution. Waiting on the future (calling ``result()``) will block forever or until a non-recoverable error @@ -208,32 +248,52 @@ def callback(message): try: future.result() except KeyboardInterrupt: - future.cancel() + future.cancel() # Trigger the shutdown. + future.result() # Block until the shutdown is complete. Args: - subscription (str): The name of the subscription. The - subscription should have already been created (for example, - by using :meth:`create_subscription`). - callback (Callable[~google.cloud.pubsub_v1.subscriber.message.Message]): + subscription: + The name of the subscription. The subscription should have already been + created (for example, by using :meth:`create_subscription`). + callback: The callback function. This function receives the message as its only argument and will be called from a different thread/ process depending on the scheduling strategy. - flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow control - settings. Use this to prevent situations where you are + flow_control: + The flow control settings. Use this to prevent situations where you are inundated with too many messages at once. - scheduler (~google.cloud.pubsub_v1.subscriber.scheduler.Scheduler): An optional - *scheduler* to use when executing the callback. This controls - how callbacks are executed concurrently. This object must not be shared - across multiple SubscriberClients. + scheduler: + An optional *scheduler* to use when executing the callback. This + controls how callbacks are executed concurrently. 
This object must not + be shared across multiple ``SubscriberClient`` instances. + use_legacy_flow_control (bool): + If set to ``True``, flow control at the Cloud Pub/Sub server is disabled, + though client-side flow control is still enabled. If set to ``False`` + (default), both server-side and client-side flow control are enabled. + await_callbacks_on_shutdown: + If ``True``, after canceling the returned future, the latter's + ``result()`` method will block until the background stream and its + helper threads have been terminated, and all currently executing message + callbacks are done processing. + + If ``False`` (default), the returned future's ``result()`` method will + not block after canceling the future. The method will instead return + immediately after the background stream and its helper threads have been + terminated, but some of the message callback threads might still be + running at that point. Returns: - A :class:`~google.cloud.pubsub_v1.subscriber.futures.StreamingPullFuture` - instance that can be used to manage the background stream. + A future instance that can be used to manage the background stream. """ flow_control = types.FlowControl(*flow_control) manager = streaming_pull_manager.StreamingPullManager( - self, subscription, flow_control=flow_control, scheduler=scheduler + self, + subscription, + flow_control=flow_control, + scheduler=scheduler, + use_legacy_flow_control=use_legacy_flow_control, + await_callbacks_on_shutdown=await_callbacks_on_shutdown, ) future = futures.StreamingPullFuture(manager) @@ -242,7 +302,7 @@ def callback(message): return future - def close(self): + def close(self) -> None: """Close the underlying channel to release socket resources. After a channel has been closed, the client instance cannot be used @@ -250,9 +310,13 @@ def close(self): This method is idempotent. """ - self.api.transport.channel.close() + transport = cast("SubscriberGrpcTransport", self._transport) + transport.grpc_channel.close() + self._closed = True - def __enter__(self): + def __enter__(self) -> "Client": + if self._closed: + raise RuntimeError("Closed subscriber cannot be used as context manager.") return self def __exit__(self, exc_type, exc_val, exc_tb): diff --git a/google/cloud/pubsub_v1/subscriber/exceptions.py b/google/cloud/pubsub_v1/subscriber/exceptions.py new file mode 100644 index 000000000..a5dad31a9 --- /dev/null +++ b/google/cloud/pubsub_v1/subscriber/exceptions.py @@ -0,0 +1,44 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
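
A minimal, hypothetical usage sketch of the reworked subscriber surface above (the project and subscription names are placeholders, and the flow-control value is illustrative only):

    from concurrent import futures as concurrent_futures
    from google.cloud import pubsub_v1

    def callback(message):
        print(f"Received message {message.message_id}.")
        message.ack()

    client = pubsub_v1.SubscriberClient()
    subscription_path = client.subscription_path("my-project", "my-subscription")

    # The client is a context manager: close() runs on exit, after which the
    # client cannot be reused (see __enter__/__exit__ above).
    with client:
        streaming_pull_future = client.subscribe(
            subscription_path,
            callback,
            flow_control=pubsub_v1.types.FlowControl(max_messages=500),
            await_callbacks_on_shutdown=True,
        )
        try:
            streaming_pull_future.result(timeout=30)
        except concurrent_futures.TimeoutError:
            streaming_pull_future.cancel()  # Trigger the shutdown.
            streaming_pull_future.result()  # Block until the shutdown is complete.
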
+ +from __future__ import absolute_import + +from enum import Enum +from google.api_core.exceptions import GoogleAPICallError +from typing import Optional + + +class AcknowledgeStatus(Enum): + SUCCESS = 1 + PERMISSION_DENIED = 2 + FAILED_PRECONDITION = 3 + INVALID_ACK_ID = 4 + OTHER = 5 + + +class AcknowledgeError(GoogleAPICallError): + """Error during ack/modack/nack operation on exactly-once-enabled subscription.""" + + def __init__(self, error_code: AcknowledgeStatus, info: Optional[str]): + self.error_code = error_code + self.info = info + message = None + if info: + message = str(self.error_code) + " : " + str(self.info) + else: + message = str(self.error_code) + super(AcknowledgeError, self).__init__(message) + + +__all__ = ("AcknowledgeError",) diff --git a/google/cloud/pubsub_v1/subscriber/futures.py b/google/cloud/pubsub_v1/subscriber/futures.py index f9fdd76ab..4c46c6813 100644 --- a/google/cloud/pubsub_v1/subscriber/futures.py +++ b/google/cloud/pubsub_v1/subscriber/futures.py @@ -14,7 +14,17 @@ from __future__ import absolute_import +import typing +from typing import Any +from typing import Union + from google.cloud.pubsub_v1 import futures +from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) class StreamingPullFuture(futures.Future): @@ -26,13 +36,13 @@ class StreamingPullFuture(futures.Future): the calling thread to block indefinitely. """ - def __init__(self, manager): + def __init__(self, manager: "StreamingPullManager"): super(StreamingPullFuture, self).__init__() - self._manager = manager - self._manager.add_close_callback(self._on_close_callback) - self._cancelled = False + self.__manager = manager + self.__manager.add_close_callback(self._on_close_callback) + self.__cancelled = False - def _on_close_callback(self, manager, result): + def _on_close_callback(self, manager: "StreamingPullManager", result: Any): if self.done(): # The future has already been resolved in a different thread, # nothing to do on the streaming pull manager shutdown. @@ -43,16 +53,73 @@ def _on_close_callback(self, manager, result): else: self.set_exception(result) - def cancel(self): + def cancel(self) -> bool: """Stops pulling messages and shutdowns the background thread consuming messages. + + The method always returns ``True``, as the shutdown is always initiated. + However, if the background stream is already being shut down or the shutdown + has completed, this method is a no-op. + + .. versionchanged:: 2.4.1 + The method does not block anymore, it just triggers the shutdown and returns + immediately. To block until the background stream is terminated, call + :meth:`result()` after cancelling the future. + + .. versionchanged:: 2.10.0 + The method always returns ``True`` instead of ``None``. """ - self._cancelled = True - return self._manager.close() + # NOTE: We circumvent the base future's self._state to track the cancellation + # state, as this state has different meaning with streaming pull futures. + self.__cancelled = True + self.__manager.close() + return True - def cancelled(self): + def cancelled(self) -> bool: + """ + Returns: + ``True`` if the subscription has been cancelled. """ - returns: - bool: ``True`` if the subscription has been cancelled. + return self.__cancelled + + +class Future(futures.Future): + """This future object is for subscribe-side calls. 
+
+ Calling :meth:`result` will resolve the future by returning the
+ acknowledgement status, unless an error occurs.
+ """
+
+ def cancel(self) -> bool:
+ """Actions in Pub/Sub generally may not be canceled.
+
+ This method always returns ``False``.
+ """
+ return False
+
+ def cancelled(self) -> bool:
+ """Actions in Pub/Sub generally may not be canceled.
+
+ This method always returns ``False``.
+ """
+ return False
+
+ def result(self, timeout: Union[int, float, None] = None) -> AcknowledgeStatus:
+ """Return a success code or raise an exception.
+
+ This blocks until the operation completes successfully and
+ returns the acknowledgement status unless an exception is raised.
+
+ Args:
+ timeout: The number of seconds before this call
+ times out and raises TimeoutError.
+
+ Returns:
+ AcknowledgeStatus.SUCCESS if the operation succeeded.
+
+ Raises:
+ concurrent.futures.TimeoutError: If the request times out.
+ AcknowledgeError: If the operation did not succeed for another
+ reason.
 """
- return self._cancelled
+ return super().result(timeout=timeout)
diff --git a/google/cloud/pubsub_v1/subscriber/message.py b/google/cloud/pubsub_v1/subscriber/message.py
index 864d697e0..aa715ac67 100644
--- a/google/cloud/pubsub_v1/subscriber/message.py
+++ b/google/cloud/pubsub_v1/subscriber/message.py
@@ -14,13 +14,27 @@
 from __future__ import absolute_import
 
-import datetime
+import datetime as dt
 import json
+import logging
 import math
 import time
+import typing
+from typing import Optional, Callable
 
-from google.api_core import datetime_helpers
 from google.cloud.pubsub_v1.subscriber._protocol import requests
+from google.cloud.pubsub_v1.subscriber import futures
+from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus
+from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import (
+ SubscribeOpenTelemetry,
+)
+
+
+if typing.TYPE_CHECKING: # pragma: NO COVER
+ import datetime
+ import queue
+ from google.cloud.pubsub_v1 import types
+ from google.protobuf.internal import containers
 
 _MESSAGE_REPR = """\
@@ -30,19 +44,25 @@
 attributes: {}
 }}"""
 
+_ACK_NACK_LOGGER = logging.getLogger("ack-nack")
+
+_SUCCESS_FUTURE = futures.Future()
+_SUCCESS_FUTURE.set_result(AcknowledgeStatus.SUCCESS)
 
-def _indent(lines, prefix=" "):
+
+def _indent(lines: str, prefix: str = " ") -> str:
 """Indent some text.
 
 Note that this is present as ``textwrap.indent``, but not in Python 2.
 
 Args:
- lines (str): The newline delimited string to be indented.
- prefix (Optional[str]): The prefix to indent each line with. Default
- to two spaces.
+ lines:
+ The newline delimited string to be indented.
+ prefix:
+ The prefix to indent each line with. Defaults to two spaces.
 
 Returns:
- str: The newly indented content.
+ The newly indented content.
 """
 indented = []
 for line in lines.split("\n"):
@@ -61,17 +81,28 @@ class Message(object):
 :class:`~.pubsub_v1.subscriber._consumer.Consumer`.)
 
 Attributes:
- message_id (str): The message ID. In general, you should not need
- to use this directly.
- data (bytes): The data in the message. Note that this will be a
- :class:`bytes`, not a text string.
- attributes (.ScalarMapContainer): The attributes sent along with the
- message. See :attr:`attributes` for more information on this type.
- publish_time (datetime): The time that this message was originally
- published.
+ message_id (str):
+ The message ID. In general, you should not need to use this directly.
+ data (bytes):
+ The data in the message. Note that this will be a :class:`bytes`,
+ not a text string.
+ attributes (MutableMapping[str, str]):
+ The attributes sent along with the message. See :attr:`attributes` for more
+ information on this type.
+ publish_time (google.protobuf.timestamp_pb2.Timestamp):
+ The time that this message was originally published.
+ opentelemetry_data (google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry.SubscribeOpenTelemetry):
+ Open Telemetry data associated with this message. None if Open Telemetry is not enabled.
 """
 
- def __init__(self, message, ack_id, delivery_attempt, request_queue):
+ def __init__(
+ self,
+ message: "types.PubsubMessage._meta._pb", # type: ignore
+ ack_id: str,
+ delivery_attempt: int,
+ request_queue: "queue.Queue",
+ exactly_once_delivery_enabled_func: Callable[[], bool] = lambda: False,
+ ):
 """Construct the Message.
 
 .. note::
 
@@ -80,20 +111,28 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue):
 responsibility of :class:`BasePolicy` subclasses to do so.
 
 Args:
- message (~.pubsub_v1.types.PubsubMessage): The message received
- from Pub/Sub.
- ack_id (str): The ack_id received from Pub/Sub.
- delivery_attempt (int): The delivery attempt counter received
- from Pub/Sub if a DeadLetterPolicy is set on the subscription,
- and zero otherwise.
- request_queue (queue.Queue): A queue provided by the policy that
- can accept requests; the policy is responsible for handling
- those requests.
+ message (types.PubsubMessage._meta._pb):
+ The message received from Pub/Sub. For performance reasons it should be
+ the raw protobuf message normally wrapped by
+ :class:`~pubsub_v1.types.PubsubMessage`. A raw message can be obtained
+ from a :class:`~pubsub_v1.types.PubsubMessage` instance through the
+ latter's ``._pb`` attribute.
+ ack_id (str):
+ The ack_id received from Pub/Sub.
+ delivery_attempt (int):
+ The delivery attempt counter received from Pub/Sub if a DeadLetterPolicy
+ is set on the subscription, and zero otherwise.
+ request_queue (queue.Queue):
+ A queue provided by the policy that can accept requests; the policy is
+ responsible for handling those requests.
+ exactly_once_delivery_enabled_func (Callable[[], bool]):
+ A Callable that returns whether exactly-once delivery is currently enabled. Defaults to a lambda that always returns False.
 """
 self._message = message
 self._ack_id = ack_id
 self._delivery_attempt = delivery_attempt if delivery_attempt > 0 else None
 self._request_queue = request_queue
+ self._exactly_once_delivery_enabled_func = exactly_once_delivery_enabled_func
 self.message_id = message.message_id
 
 # The instantiation time is the time that this message
@@ -101,6 +140,21 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue):
 # the default lease deadline.
 self._received_timestamp = time.time()
 
+ # Store the message attributes directly to speed up attribute access, i.e.
+ # to avoid the two lookups that the self._message.attribute pattern would
+ # incur in properties.
+ self._attributes = message.attributes
+ self._data = message.data
+ self._publish_time = dt.datetime.fromtimestamp(
+ message.publish_time.seconds + message.publish_time.nanos / 1e9,
+ tz=dt.timezone.utc,
+ )
+ self._ordering_key = message.ordering_key
+ self._size = message.ByteSize()
+
+ # None if Open Telemetry is disabled. Else contains OpenTelemetry data.
+ self._opentelemetry_data: Optional[SubscribeOpenTelemetry] = None
+
 def __repr__(self):
 # Get an abbreviated version of the data.
abbv_data = self._message.data
@@ -116,12 +170,20 @@ def __repr__(self):
 return _MESSAGE_REPR.format(abbv_data, str(self.ordering_key), pretty_attrs)
 
 @property
- def attributes(self):
+ def opentelemetry_data(self):
+ return self._opentelemetry_data # pragma: NO COVER
+
+ @opentelemetry_data.setter
+ def opentelemetry_data(self, data):
+ self._opentelemetry_data = data # pragma: NO COVER
+
+ @property
+ def attributes(self) -> "containers.ScalarMap":
 """Return the attributes of the underlying Pub/Sub Message.
 
 .. warning::
 
- A ``ScalarMapContainer`` behaves slightly differently than a
+ A ``ScalarMap`` behaves slightly differently than a
 ``dict``. For a Pub / Sub message this is a ``string->string`` map.
 When trying to access a value via ``map['key']``, if the key is
 not in the map, then the default value for the string type will
@@ -129,51 +191,48 @@ def attributes(self):
 to just cast the map to a ``dict`` or to use ``map.get``.
 
 Returns:
- .ScalarMapContainer: The message's attributes. This is a
+ containers.ScalarMap: The message's attributes. This is a
 ``dict``-like object provided by ``google.protobuf``.
 """
- return self._message.attributes
+ return self._attributes
 
 @property
- def data(self):
+ def data(self) -> bytes:
 """Return the data for the underlying Pub/Sub Message.
 
 Returns:
- bytes: The message data. This is always a bytestring; if you
- want a text string, call :meth:`bytes.decode`.
+ bytes: The message data. This is always a bytestring; if you want
+ a text string, call :meth:`bytes.decode`.
 """
- return self._message.data
+ return self._data
 
 @property
- def publish_time(self):
+ def publish_time(self) -> "datetime.datetime":
 """Return the time that the message was originally published.
 
 Returns:
- datetime: The date and time that the message was published.
+ datetime.datetime: The date and time that the message was
+ published.
 """
- timestamp = self._message.publish_time
- delta = datetime.timedelta(
- seconds=timestamp.seconds, microseconds=timestamp.nanos // 1000
- )
- return datetime_helpers._UTC_EPOCH + delta
+ return self._publish_time
 
 @property
- def ordering_key(self):
- """str: the ordering key used to publish the message."""
- return self._message.ordering_key
+ def ordering_key(self) -> str:
+ """The ordering key used to publish the message."""
+ return self._ordering_key
 
 @property
- def size(self):
+ def size(self) -> int:
 """Return the size of the underlying message, in bytes."""
- return self._message.ByteSize()
+ return self._size
 
 @property
- def ack_id(self):
- """str: the ID used to ack the message."""
+ def ack_id(self) -> str:
+ """The ID used to ack the message."""
 return self._ack_id
 
 @property
- def delivery_attempt(self):
+ def delivery_attempt(self) -> Optional[int]:
 """The delivery attempt counter is 1 + (the sum of number of NACKs
 and number of ack_deadline exceeds) for this message. It is set to
 None if a DeadLetterPolicy is not set on the subscription.
@@ -188,11 +247,11 @@ def delivery_attempt(self):
 is calculated at best effort and is approximate.
 
 Returns:
- Optional[int]: The delivery attempt counter or None.
+ Optional[int]: The delivery attempt counter or ``None``.
 """
 return self._delivery_attempt
 
- def ack(self):
+ def ack(self) -> None:
 """Acknowledge the given message.
 
 Acknowledging a message in Pub/Sub means that you are done
@@ -204,19 +263,106 @@ def ack(self):
 .. warning::
 Acks in Pub/Sub are best effort. You should always ensure
 that your processing code is idempotent, as you may
- receive any given message more than once.
+ receive any given message more than once. If you need strong
+ guarantees about acks and re-deliveries, enable exactly-once
+ delivery on your subscription and use the `ack_with_response`
+ method instead. Exactly once delivery is a preview feature.
+ For more details, see:
+ https://cloud.google.com/pubsub/docs/exactly-once-delivery
+ """
+ if self.opentelemetry_data:
+ self.opentelemetry_data.add_process_span_event("ack called")
+ self.opentelemetry_data.end_process_span()
 time_to_ack = math.ceil(time.time() - self._received_timestamp)
 self._request_queue.put(
 requests.AckRequest(
+ message_id=self.message_id,
 ack_id=self._ack_id,
 byte_size=self.size,
 time_to_ack=time_to_ack,
 ordering_key=self.ordering_key,
+ future=None,
+ opentelemetry_data=self.opentelemetry_data,
 )
 )
+ _ACK_NACK_LOGGER.debug(
+ "Called ack for message (id=%s, ack_id=%s, ordering_key=%s)",
+ self.message_id,
+ self.ack_id,
+ self.ordering_key,
+ )
+
+ def ack_with_response(self) -> "futures.Future":
+ """Acknowledge the given message.
 
- def drop(self):
+ Acknowledging a message in Pub/Sub means that you are done
+ with it, and it will not be delivered to this subscription again.
+ You should avoid acknowledging messages until you have
+ *finished* processing them, so that in the event of a failure,
+ you receive the message again.
+
+ If exactly-once delivery is NOT enabled on the subscription, the
+ future returns immediately with an AcknowledgeStatus.SUCCESS.
+ Since acks in Cloud Pub/Sub are best effort when exactly-once
+ delivery is disabled, the message may be re-delivered. Because
+ re-deliveries are possible, you should ensure that your processing
+ code is idempotent, as you may receive any given message more than
+ once.
+
+ If exactly-once delivery is enabled on the subscription, the
+ future returned by this method tracks the state of the
+ acknowledgement operation. If the future completes successfully,
+ the message is guaranteed NOT to be re-delivered. Otherwise, the
+ future will contain an exception with more details about the
+ failure and the message may be re-delivered.
+
+ Exactly once delivery is a preview feature. For more details,
+ see https://cloud.google.com/pubsub/docs/exactly-once-delivery
+
+ Returns:
+ futures.Future: A
+ :class:`~google.cloud.pubsub_v1.subscriber.futures.Future`
+ instance that conforms to the Python standard library's
+ :class:`~concurrent.futures.Future` interface (but not an
+ instance of that class). Call `result()` to get the result
+ of the operation; upon success, a
+ pubsub_v1.subscriber.exceptions.AcknowledgeStatus.SUCCESS
+ will be returned and upon an error, a
+ pubsub_v1.subscriber.exceptions.AcknowledgeError exception
+ will be thrown.
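
A sketch of how a callback might consume ack_with_response() on an exactly-once delivery subscription; the error handling shown is illustrative, and AcknowledgeError/AcknowledgeStatus come from the new subscriber/exceptions.py module above:

    from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions

    def callback(message):
        try:
            # Blocks until the ack is confirmed when exactly-once delivery is
            # enabled; resolves immediately with SUCCESS otherwise.
            message.ack_with_response().result(timeout=60)
            print(f"Ack confirmed for {message.message_id}.")
        except sub_exceptions.AcknowledgeError as e:
            # error_code is an AcknowledgeStatus member, e.g. INVALID_ACK_ID.
            print(f"Ack failed with {e.error_code} for {message.message_id}.")
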
+ """ + _ACK_NACK_LOGGER.debug( + "Called ack for message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=True)", + self.message_id, + self.ack_id, + self.ordering_key, + ) + if self.opentelemetry_data: + self.opentelemetry_data.add_process_span_event("ack called") + self.opentelemetry_data.end_process_span() + req_future: Optional[futures.Future] + if self._exactly_once_delivery_enabled_func(): + future = futures.Future() + req_future = future + else: + future = _SUCCESS_FUTURE + req_future = None + time_to_ack = math.ceil(time.time() - self._received_timestamp) + self._request_queue.put( + requests.AckRequest( + message_id=self.message_id, + ack_id=self._ack_id, + byte_size=self.size, + time_to_ack=time_to_ack, + ordering_key=self.ordering_key, + future=req_future, + opentelemetry_data=self.opentelemetry_data, + ) + ) + return future + + def drop(self) -> None: """Release the message from lease management. This informs the policy to no longer hold on to the lease for this @@ -235,32 +381,182 @@ def drop(self): ) ) - def modify_ack_deadline(self, seconds): + def modify_ack_deadline(self, seconds: int) -> None: """Resets the deadline for acknowledgement. New deadline will be the given value of seconds from now. - The default implementation handles this for you; you should not need - to manually deal with setting ack deadlines. The exception case is + The default implementation handles automatically modacking received messages for you; + you should not need to manually deal with setting ack deadlines. The exception case is + if you are implementing your own custom subclass of + :class:`~.pubsub_v1.subcriber._consumer.Consumer`. + + Args: + seconds (int): + The number of seconds to set the lease deadline to. This should be + between 0 and 600. Due to network latency, values below 10 are advised + against. + """ + self._request_queue.put( + requests.ModAckRequest( + message_id=self.message_id, + ack_id=self._ack_id, + seconds=seconds, + future=None, + opentelemetry_data=self.opentelemetry_data, + ) + ) + + def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": + """Resets the deadline for acknowledgement and returns the response + status via a future. + + New deadline will be the given value of seconds from now. + + The default implementation handles automatically modacking received messages for you; + you should not need to manually deal with setting ack deadlines. The exception case is if you are implementing your own custom subclass of :class:`~.pubsub_v1.subcriber._consumer.Consumer`. + If exactly-once delivery is NOT enabled on the subscription, the + future returns immediately with an AcknowledgeStatus.SUCCESS. + Since modify-ack-deadline operations in Cloud Pub/Sub are best effort + when exactly-once delivery is disabled, the message may be re-delivered + within the set deadline. + + If exactly-once delivery is enabled on the subscription, the + future returned by this method tracks the state of the + modify-ack-deadline operation. If the future completes successfully, + the message is guaranteed NOT to be re-delivered within the new deadline. + Otherwise, the future will contain an exception with more details about + the failure and the message will be redelivered according to its + currently-set ack deadline. + + Exactly once delivery is a preview feature. For more details, + see https://cloud.google.com/pubsub/docs/exactly-once-delivery." + Args: - seconds (int): The number of seconds to set the lease deadline - to. This should be between 0 and 600. 
Due to network latency,
- values below 10 are advised against.
+ seconds (int):
+ The number of seconds to set the lease deadline to. This should be
+ between 0 and 600. Due to network latency, values below 10 are advised
+ against.
+ Returns:
+ futures.Future: A
+ :class:`~google.cloud.pubsub_v1.subscriber.futures.Future`
+ instance that conforms to the Python standard library's
+ :class:`~concurrent.futures.Future` interface (but not an
+ instance of that class). Call `result()` to get the result
+ of the operation; upon success, a
+ pubsub_v1.subscriber.exceptions.AcknowledgeStatus.SUCCESS
+ will be returned and upon an error, a
+ pubsub_v1.subscriber.exceptions.AcknowledgeError exception
+ will be thrown.
+ """
+ req_future: Optional[futures.Future]
+ if self._exactly_once_delivery_enabled_func():
+ future = futures.Future()
+ req_future = future
+ else:
+ future = _SUCCESS_FUTURE
+ req_future = None
+
 self._request_queue.put(
- requests.ModAckRequest(ack_id=self._ack_id, seconds=seconds)
+ requests.ModAckRequest(
+ message_id=self.message_id,
+ ack_id=self._ack_id,
+ seconds=seconds,
+ future=req_future,
+ opentelemetry_data=self.opentelemetry_data,
+ )
 )
 
- def nack(self):
- """Decline to acknowldge the given message.
+ return future
 
- This will cause the message to be re-delivered to the subscription.
+ def nack(self) -> None:
+ """Decline to acknowledge the given message.
+
+ This will cause the message to be re-delivered to subscribers. Re-deliveries
+ may take place immediately or after a delay, and may arrive at this subscriber
+ or another.
 """
+ _ACK_NACK_LOGGER.debug(
+ "Called nack for message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=%s)",
+ self.message_id,
+ self.ack_id,
+ self.ordering_key,
+ self._exactly_once_delivery_enabled_func(),
+ )
+ if self.opentelemetry_data:
+ self.opentelemetry_data.add_process_span_event("nack called")
+ self.opentelemetry_data.end_process_span()
 self._request_queue.put(
 requests.NackRequest(
- ack_id=self._ack_id, byte_size=self.size, ordering_key=self.ordering_key
+ ack_id=self._ack_id,
+ byte_size=self.size,
+ ordering_key=self.ordering_key,
+ future=None,
+ opentelemetry_data=self.opentelemetry_data,
 )
 )
+
+ def nack_with_response(self) -> "futures.Future":
+ """Decline to acknowledge the given message, returning the response status via
+ a future.
+
+ This will cause the message to be re-delivered to subscribers. Re-deliveries
+ may take place immediately or after a delay, and may arrive at this subscriber
+ or another.
+
+ If exactly-once delivery is NOT enabled on the subscription, the
+ future returns immediately with an AcknowledgeStatus.SUCCESS.
+
+ If exactly-once delivery is enabled on the subscription, the
+ future returned by this method tracks the state of the
+ nack operation. If the future completes successfully,
+ the future's result will be an AcknowledgeStatus.SUCCESS.
+ Otherwise, the future will contain an exception with more details about
+ the failure.
+
+ Exactly once delivery is a preview feature. For more details,
+ see https://cloud.google.com/pubsub/docs/exactly-once-delivery
+
+ Returns:
+ futures.Future: A
+ :class:`~google.cloud.pubsub_v1.subscriber.futures.Future`
+ instance that conforms to the Python standard library's
+ :class:`~concurrent.futures.Future` interface (but not an
+ instance of that class).
Call `result()` to get the result
+ of the operation; upon success, a
+ pubsub_v1.subscriber.exceptions.AcknowledgeStatus.SUCCESS
+ will be returned and upon an error, a
+ pubsub_v1.subscriber.exceptions.AcknowledgeError exception
+ will be thrown.
+
+ """
+ if self.opentelemetry_data:
+ self.opentelemetry_data.add_process_span_event("nack called")
+ self.opentelemetry_data.end_process_span()
+ req_future: Optional[futures.Future]
+ if self._exactly_once_delivery_enabled_func():
+ future = futures.Future()
+ req_future = future
+ else:
+ future = _SUCCESS_FUTURE
+ req_future = None
+
+ self._request_queue.put(
+ requests.NackRequest(
+ ack_id=self._ack_id,
+ byte_size=self.size,
+ ordering_key=self.ordering_key,
+ future=req_future,
+ opentelemetry_data=self.opentelemetry_data,
+ )
+ )
+
+ return future
+
+ @property
+ def exactly_once_enabled(self):
+ return self._exactly_once_delivery_enabled_func()
diff --git a/google/cloud/pubsub_v1/subscriber/scheduler.py b/google/cloud/pubsub_v1/subscriber/scheduler.py
index ef2ef59cb..cc3393bd7 100644
--- a/google/cloud/pubsub_v1/subscriber/scheduler.py
+++ b/google/cloud/pubsub_v1/subscriber/scheduler.py
@@ -20,14 +20,17 @@
 
 import abc
 import concurrent.futures
+import queue
 import sys
+import typing
+from typing import Callable, List, Optional
+import warnings
 
-import six
-from six.moves import queue
+if typing.TYPE_CHECKING: # pragma: NO COVER
+ from google.cloud import pubsub_v1
 
-@six.add_metaclass(abc.ABCMeta)
-class Scheduler(object):
+class Scheduler(metaclass=abc.ABCMeta):
 """Abstract base class for schedulers.
 
 Schedulers are used to schedule callbacks asynchronously.
@@ -35,7 +38,7 @@ class Scheduler(object):
 
 @property
 @abc.abstractmethod
- def queue(self):
+ def queue(self) -> "queue.Queue": # pragma: NO COVER
 """Queue: A concurrency-safe queue specific to the underlying
 concurrency implementation.
 
@@ -44,13 +47,13 @@ def queue(self):
 raise NotImplementedError
 
 @abc.abstractmethod
- def schedule(self, callback, *args, **kwargs):
+ def schedule(self, callback: Callable, *args, **kwargs) -> None: # pragma: NO COVER
 """Schedule the callback to be called asynchronously.
 
 Args:
- callback (Callable): The function to call.
- args: Positional arguments passed to the function.
- kwargs: Key-word arguments passed to the function.
+ callback: The function to call.
+ args: Positional arguments passed to the callback.
+ kwargs: Keyword arguments passed to the callback.
 
 Returns:
 None
@@ -58,19 +61,30 @@ def schedule(self, callback, *args, **kwargs):
 raise NotImplementedError
 
 @abc.abstractmethod
- def shutdown(self):
+ def shutdown(
+ self, await_msg_callbacks: bool = False
+ ) -> List["pubsub_v1.subscriber.message.Message"]: # pragma: NO COVER
 """Shuts down the scheduler and immediately ends all pending callbacks.
+
+ Args:
+ await_msg_callbacks:
+ If ``True``, the method will block until all currently executing
+ callbacks are done processing. If ``False`` (default), the
+ method will not wait for the currently running callbacks to complete.
+
+ Returns:
+ The messages submitted to the scheduler that were not yet dispatched
+ to their callbacks.
+ It is assumed that each message was submitted to the scheduler as the
+ first positional argument to the provided callback.
 """
 raise NotImplementedError
 
-def _make_default_thread_pool_executor():
- # Python 2.7 and 3.6+ have the thread_name_prefix argument, which is useful
- # for debugging.
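
For reference, a short sketch of plugging a custom executor into the ThreadScheduler shown below (the worker count and names are arbitrary examples; a scheduler instance must not be shared across clients):

    import concurrent.futures

    from google.cloud import pubsub_v1
    from google.cloud.pubsub_v1.subscriber.scheduler import ThreadScheduler

    executor = concurrent.futures.ThreadPoolExecutor(
        max_workers=4, thread_name_prefix="my-subscriber"
    )
    scheduler = ThreadScheduler(executor=executor)

    client = pubsub_v1.SubscriberClient()
    future = client.subscribe(
        client.subscription_path("my-project", "my-subscription"),
        callback,  # a user-supplied message handler, as sketched earlier
        scheduler=scheduler,
    )
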
- executor_kwargs = {}
- if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6):
- executor_kwargs["thread_name_prefix"] = "ThreadPoolExecutor-ThreadScheduler"
- return concurrent.futures.ThreadPoolExecutor(max_workers=10, **executor_kwargs)
+def _make_default_thread_pool_executor() -> concurrent.futures.ThreadPoolExecutor:
+ return concurrent.futures.ThreadPoolExecutor(
+ max_workers=10, thread_name_prefix="ThreadPoolExecutor-ThreadScheduler"
+ )
 
 class ThreadScheduler(Scheduler):
@@ -80,12 +94,15 @@ class ThreadScheduler(Scheduler):
 This scheduler is useful in typical I/O-bound message processing.
 
 Args:
- executor(concurrent.futures.ThreadPoolExecutor): An optional executor
- to use. If not specified, a default one will be created.
+ executor:
+ An optional executor to use. If not specified, a default one
+ will be created.
 """
 
- def __init__(self, executor=None):
- self._queue = queue.Queue()
+ def __init__(
+ self, executor: Optional[concurrent.futures.ThreadPoolExecutor] = None
+ ):
+ self._queue: queue.Queue = queue.Queue()
 if executor is None:
 self._executor = _make_default_thread_pool_executor()
 else:
@@ -97,28 +114,76 @@ def queue(self):
 and the scheduling thread."""
 return self._queue
 
- def schedule(self, callback, *args, **kwargs):
+ def schedule(self, callback: Callable, *args, **kwargs) -> None:
 """Schedule the callback to be called asynchronously in a thread pool.
 
 Args:
- callback (Callable): The function to call.
- args: Positional arguments passed to the function.
- kwargs: Key-word arguments passed to the function.
+ callback: The function to call.
+ args: Positional arguments passed to the callback.
+ kwargs: Keyword arguments passed to the callback.
 
 Returns:
 None
 """
- self._executor.submit(callback, *args, **kwargs)
+ try:
+ self._executor.submit(callback, *args, **kwargs)
+ except RuntimeError:
+ warnings.warn(
+ "Scheduling a callback after executor shutdown.",
+ category=RuntimeWarning,
+ stacklevel=2,
+ )
+
+ def shutdown(
+ self, await_msg_callbacks: bool = False
+ ) -> List["pubsub_v1.subscriber.message.Message"]:
+ """Shut down the scheduler and immediately end all pending callbacks.
 
- def shutdown(self):
- """Shuts down the scheduler and immediately end all pending callbacks.
+ Args:
+ await_msg_callbacks:
+ If ``True``, the method will block until all currently executing
+ executor threads are done processing. If ``False`` (default), the
+ method will not wait for the currently running threads to complete.
+
+ Returns:
+ The messages submitted to the scheduler that were not yet dispatched
+ to their callbacks.
+ It is assumed that each message was submitted to the scheduler as the
+ first positional argument to the provided callback.
 """
- # Drop all pending item from the executor. Without this, the executor
- # will block until all pending items are complete, which is
- # undesirable.
+ dropped_messages = []
+
+ # Drop all pending items from the executor. Without this, the executor will also
+ # try to process any pending work items before termination, which is undesirable.
+ #
+ # TODO: Replace the logic below by passing `cancel_futures=True` to shutdown()
+ # once we only need to support Python 3.9+.
 try:
 while True:
- self._executor._work_queue.get(block=False)
+ work_item = self._executor._work_queue.get(block=False)
+ if work_item is None: # Executor in shutdown mode.
+ continue
+
+ dropped_message = None
+ if sys.version_info < (3, 14):
+ # For Python < 3.14, work_item.args is a tuple of positional arguments.
+ # The message is expected to be the first argument. + if hasattr(work_item, "args") and work_item.args: + dropped_message = work_item.args[0] # type: ignore[index] + else: + # For Python >= 3.14, work_item.task is (fn, args, kwargs). + # The message is expected to be the first item in the args tuple (task[1]). + if ( + hasattr(work_item, "task") + and len(work_item.task) == 3 + and work_item.task[1] + ): + dropped_message = work_item.task[1][0] + + if dropped_message is not None: + dropped_messages.append(dropped_message) except queue.Empty: pass - self._executor.shutdown() + + self._executor.shutdown(wait=await_msg_callbacks) + return dropped_messages diff --git a/google/cloud/pubsub_v1/types.py b/google/cloud/pubsub_v1/types.py index b52b3ea60..6746e141a 100644 --- a/google/cloud/pubsub_v1/types.py +++ b/google/cloud/pubsub_v1/types.py @@ -16,12 +16,18 @@ import collections import enum +import inspect import sys +import typing +from typing import Dict, NamedTuple, Union -from google.api import http_pb2 -from google.iam.v1 import iam_policy_pb2 +import proto # type: ignore + +from google.api import http_pb2 # type: ignore +from google.api_core import gapic_v1 +from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 -from google.iam.v1.logging import audit_data_pb2 +from google.iam.v1.logging import audit_data_pb2 # type: ignore from google.protobuf import descriptor_pb2 from google.protobuf import duration_pb2 from google.protobuf import empty_pb2 @@ -29,7 +35,25 @@ from google.protobuf import timestamp_pb2 from google.api_core.protobuf_helpers import get_messages -from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.api_core.timeout import ConstantTimeout + +from google.pubsub_v1.types import pubsub as pubsub_gapic_types + + +if typing.TYPE_CHECKING: # pragma: NO COVER + from types import ModuleType + from google.pubsub_v1 import types as gapic_types + from google.pubsub_v1.services.publisher.client import OptionalRetry + + # TODO: Eventually implement OptionalTimeout in the GAPIC code generator and import + # it from the generated code. It's the same solution that is used for OptionalRetry. + # https://github.com/googleapis/gapic-generator-python/pull/1032/files + # https://github.com/googleapis/gapic-generator-python/pull/1065/files + if hasattr(gapic_v1.method, "_MethodDefault"): + # _MethodDefault was only added in google-api-core==2.2.2 + OptionalTimeout = Union[gapic_types.TimeoutType, gapic_v1.method._MethodDefault] + else: + OptionalTimeout = Union[gapic_types.TimeoutType, object] # type: ignore # Define the default values for batching. @@ -37,28 +61,39 @@ # This class is used when creating a publisher or subscriber client, and # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. -BatchSettings = collections.namedtuple( - "BatchSettings", ["max_bytes", "max_latency", "max_messages"] -) -BatchSettings.__new__.__defaults__ = ( - 1 * 1000 * 1000, # max_bytes: 1 MB - 0.01, # max_latency: 10 ms - 100, # max_messages: 100 -) - -if sys.version_info >= (3, 5): - BatchSettings.__doc__ = "The settings for batch publishing the messages." - BatchSettings.max_bytes.__doc__ = ( +class BatchSettings(NamedTuple): + """The settings for batch publishing the messages. + + Attributes: + max_bytes (int): + The maximum total size of the messages to collect before automatically + publishing the batch, including any byte size overhead of the publish + request itself. 
The maximum value is bound by the server-side limit of + 10_000_000 bytes. Defaults to 1 MB. + max_latency (float): + The maximum number of seconds to wait for additional messages before + automatically publishing the batch. Defaults to 10ms. + max_messages (int): + The maximum number of messages to collect before automatically + publishing the batch. Defaults to 100. + """ + + max_bytes: int = 1 * 1000 * 1000 # 1 MB + ( "The maximum total size of the messages to collect before automatically " "publishing the batch, including any byte size overhead of the publish " "request itself. The maximum value is bound by the server-side limit of " "10_000_000 bytes." ) - BatchSettings.max_latency.__doc__ = ( + + max_latency: float = 0.01 # 10 ms + ( "The maximum number of seconds to wait for additional messages before " "automatically publishing the batch." ) - BatchSettings.max_messages.__doc__ = ( + + max_messages: int = 100 + ( "The maximum number of messages to collect before automatically " "publishing the batch." ) @@ -72,100 +107,198 @@ class LimitExceededBehavior(str, enum.Enum): ERROR = "error" -PublishFlowControl = collections.namedtuple( - "PublishFlowControl", ["message_limit", "byte_limit", "limit_exceeded_behavior"] -) -PublishFlowControl.__new__.__defaults__ = ( - 10 * BatchSettings.__new__.__defaults__[2], # message limit - 10 * BatchSettings.__new__.__defaults__[0], # byte limit - LimitExceededBehavior.IGNORE, # desired behavior -) +class PublishFlowControl(NamedTuple): + """The client flow control settings for message publishing. + + Attributes: + message_limit (int): + The maximum number of messages awaiting to be published. + Defaults to 1000. + byte_limit (int): + The maximum total size of messages awaiting to be published. + Defaults to 10MB. + limit_exceeded_behavior (LimitExceededBehavior): + The action to take when publish flow control limits are exceeded. + Defaults to LimitExceededBehavior.IGNORE. + """ + + message_limit: int = 10 * BatchSettings.__new__.__defaults__[2] # type: ignore + """The maximum number of messages awaiting to be published.""" + + byte_limit: int = 10 * BatchSettings.__new__.__defaults__[0] # type: ignore + """The maximum total size of messages awaiting to be published.""" + + limit_exceeded_behavior: LimitExceededBehavior = LimitExceededBehavior.IGNORE + """The action to take when publish flow control limits are exceeded.""" + + +# Define the default subscriber options. +# +# This class is used when creating a subscriber client to pass in options +# to enable/disable features. +class SubscriberOptions(NamedTuple): + """ + Options for the subscriber client. + Attributes: + enable_open_telemetry_tracing (bool): + Whether to enable OpenTelemetry tracing. Defaults to False. + """ + + enable_open_telemetry_tracing: bool = False + """ + Whether to enable OpenTelemetry tracing. + + Warning: traces are subject to change. The name and attributes of a span might + change without notice. Only use run traces interactively. Don't use in + automation. Running non-interactive traces can cause problems if the underlying + trace architecture changes without notice. + """ -if sys.version_info >= (3, 5): - PublishFlowControl.__doc__ = ( - "The client flow control settings for message publishing." - ) - PublishFlowControl.message_limit.__doc__ = ( - "The maximum number of messages awaiting to be published." - ) - PublishFlowControl.byte_limit.__doc__ = ( - "The maximum total size of messages awaiting to be published." 
- ) - PublishFlowControl.limit_exceeded_behavior.__doc__ = ( - "The action to take when publish flow control limits are exceeded." - ) # Define the default publisher options. # # This class is used when creating a publisher client to pass in options # to enable/disable features. -PublisherOptions = collections.namedtuple( - "PublisherConfig", ["enable_message_ordering", "flow_control"] -) -PublisherOptions.__new__.__defaults__ = ( - False, # enable_message_ordering: False - PublishFlowControl(), # default flow control settings -) - -if sys.version_info >= (3, 5): - PublisherOptions.__doc__ = "The options for the publisher client." - PublisherOptions.enable_message_ordering.__doc__ = ( - "Whether to order messages in a batch by a supplied ordering key." - "EXPERIMENTAL: Message ordering is an alpha feature that requires " - "special permissions to use. Please contact the Cloud Pub/Sub team for " - "more information." - ) - PublisherOptions.flow_control.__doc__ = ( +class PublisherOptions(NamedTuple): + """The options for the publisher client. + + Attributes: + enable_message_ordering (bool): + Whether to order messages in a batch by a supplied ordering key. + Defaults to false. + flow_control (PublishFlowControl): + Flow control settings for message publishing by the client. By default + the publisher client does not do any throttling. + retry (OptionalRetry): + Retry settings for message publishing by the client. This should be + an instance of :class:`google.api_core.retry.Retry`. + timeout (OptionalTimeout): + Timeout settings for message publishing by the client. It should be + compatible with :class:`~.pubsub_v1.types.TimeoutType`. + """ + + enable_message_ordering: bool = False + """Whether to order messages in a batch by a supplied ordering key.""" + + flow_control: PublishFlowControl = PublishFlowControl() + ( "Flow control settings for message publishing by the client. By default " "the publisher client does not do any throttling." ) + retry: "OptionalRetry" = gapic_v1.method.DEFAULT # use api_core default + ( + "Retry settings for message publishing by the client. This should be " + "an instance of :class:`google.api_core.retry.Retry`." + ) + + # Use ConstantTimeout instead of api_core default because the default + # value results in retries with zero deadline. + # Refer https://github.com/googleapis/python-api-core/issues/654 + timeout: "OptionalTimeout" = ConstantTimeout(60) + ( + "Timeout settings for message publishing by the client. It should be " + "compatible with :class:`~.pubsub_v1.types.TimeoutType`." + ) + + enable_open_telemetry_tracing: bool = False # disabled by default + """ + Open Telemetry tracing is enabled if this is set to True. + + Warning: traces are subject to change. The name and attributes of a span might + change without notice. Only use run traces interactively. Don't use in + automation. Running non-interactive traces can cause problems if the underlying + trace architecture changes without notice. + """ + # Define the type class and default values for flow control settings. # # This class is used when creating a publisher or subscriber client, and # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. 
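
A hypothetical configuration of the publisher options defined above (the limits are illustrative, and LimitExceededBehavior.BLOCK assumes the enum's other members as shipped in the released library):

    from google.cloud import pubsub_v1
    from google.cloud.pubsub_v1.types import (
        LimitExceededBehavior,
        PublisherOptions,
        PublishFlowControl,
    )

    publisher = pubsub_v1.PublisherClient(
        publisher_options=PublisherOptions(
            enable_message_ordering=True,
            flow_control=PublishFlowControl(
                message_limit=100,
                limit_exceeded_behavior=LimitExceededBehavior.BLOCK,
            ),
        )
    )
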
-FlowControl = collections.namedtuple( - "FlowControl", - [ - "max_bytes", - "max_messages", - "max_lease_duration", - "max_duration_per_lease_extension", - ], -) -FlowControl.__new__.__defaults__ = ( - 100 * 1024 * 1024, # max_bytes: 100mb - 1000, # max_messages: 1000 - 1 * 60 * 60, # max_lease_duration: 1 hour. - 0, # max_duration_per_lease_extension: disabled -) - -if sys.version_info >= (3, 5): - FlowControl.__doc__ = ( - "The settings for controlling the rate at which messages are pulled " - "with an asynchronous subscription." - ) - FlowControl.max_bytes.__doc__ = ( +class FlowControl(NamedTuple): + """The settings for controlling the rate at which messages are pulled + with an asynchronous subscription. + + Attributes: + max_bytes (int): + The maximum total size of received - but not yet processed - messages + before pausing the message stream. Defaults to 100 MiB. + max_messages (int): + The maximum number of received - but not yet processed - messages before + pausing the message stream. Defaults to 1000. + max_lease_duration (float): + The maximum amount of time in seconds to hold a lease on a message + before dropping it from the lease management. Defaults to 1 hour. + min_duration_per_lease_extension (float): + The min amount of time in seconds for a single lease extension attempt. + Must be between 10 and 600 (inclusive). Ignored by default, but set to + 60 seconds if the subscription has exactly-once delivery enabled. + max_duration_per_lease_extension (float): + The max amount of time in seconds for a single lease extension attempt. + Bounds the delay before a message redelivery if the subscriber + fails to extend the deadline. Must be between 10 and 600 (inclusive). Ignored + if set to 0. + """ + + max_bytes: int = 100 * 1024 * 1024 # 100 MiB + ( "The maximum total size of received - but not yet processed - messages " "before pausing the message stream." ) - FlowControl.max_messages.__doc__ = ( + + max_messages: int = 1000 + ( "The maximum number of received - but not yet processed - messages before " "pausing the message stream." ) - FlowControl.max_lease_duration.__doc__ = ( + + max_lease_duration: float = 1 * 60 * 60 # 1 hour + ( "The maximum amount of time in seconds to hold a lease on a message " "before dropping it from the lease management." ) - FlowControl.max_duration_per_lease_extension.__doc__ = ( + + min_duration_per_lease_extension: float = 0 + ( + "The min amount of time in seconds for a single lease extension attempt. " + "Must be between 10 and 600 (inclusive). Ignored by default, but set to " + "60 seconds if the subscription has exactly-once delivery enabled." + ) + + max_duration_per_lease_extension: float = 0 # disabled by default + ( "The max amount of time in seconds for a single lease extension attempt. " "Bounds the delay before a message redelivery if the subscriber " - "fails to extend the deadline." + "fails to extend the deadline. Must be between 10 and 600 (inclusive). Ignored " + "if set to 0." ) +# The current api core helper does not find new proto messages of type proto.Message, +# thus we need our own helper. Adjusted from +# https://github.com/googleapis/python-api-core/blob/8595f620e7d8295b6a379d6fd7979af3bef717e2/google/api_core/protobuf_helpers.py#L101-L118 +def _get_protobuf_messages(module: "ModuleType") -> Dict[str, proto.Message]: + """Discover all protobuf Message classes in a given import module. + + Args: + module (module): A Python module; :func:`dir` will be run against this + module to find Message subclasses. 
+ + Returns: + dict[str, proto.Message]: A dictionary with the + Message class names as keys, and the Message subclasses themselves + as values. + """ + answer = collections.OrderedDict() + for name in dir(module): + candidate = getattr(module, name) + if inspect.isclass(candidate) and issubclass(candidate, proto.Message): + answer[name] = candidate + return answer + + _shared_modules = [ http_pb2, iam_policy_pb2, @@ -178,11 +311,15 @@ class LimitExceededBehavior(str, enum.Enum): timestamp_pb2, ] -_local_modules = [pubsub_pb2] - - -names = ["BatchSettings", "FlowControl"] +_local_modules = [pubsub_gapic_types] +names = [ + "BatchSettings", + "LimitExceededBehavior", + "PublishFlowControl", + "PublisherOptions", + "FlowControl", +] for module in _shared_modules: for name, message in get_messages(module).items(): @@ -190,7 +327,7 @@ class LimitExceededBehavior(str, enum.Enum): names.append(name) for module in _local_modules: - for name, message in get_messages(module).items(): + for name, message in _get_protobuf_messages(module).items(): message.__module__ = "google.cloud.pubsub_v1.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/google/pubsub/__init__.py b/google/pubsub/__init__.py new file mode 100644 index 000000000..d88449a53 --- /dev/null +++ b/google/pubsub/__init__.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
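
A quick illustration of the re-export machinery implemented above: proto-plus messages discovered by _get_protobuf_messages() are attached to google.cloud.pubsub_v1.types and report it as their module (a sketch, assuming the package is installed):

    from google.cloud.pubsub_v1 import types

    # PubsubMessage is defined in the GAPIC layer but re-exported here.
    msg = types.PubsubMessage(data=b"hello", ordering_key="k1")
    print(type(msg).__module__)  # -> "google.cloud.pubsub_v1.types"
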
+# +from google.pubsub import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.pubsub_v1.services.publisher.client import PublisherClient +from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient +from google.pubsub_v1.services.schema_service.client import SchemaServiceClient +from google.pubsub_v1.services.schema_service.async_client import ( + SchemaServiceAsyncClient, +) +from google.pubsub_v1.services.subscriber.client import SubscriberClient +from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient + +from google.pubsub_v1.types.pubsub import AcknowledgeRequest +from google.pubsub_v1.types.pubsub import BigQueryConfig +from google.pubsub_v1.types.pubsub import CloudStorageConfig +from google.pubsub_v1.types.pubsub import CreateSnapshotRequest +from google.pubsub_v1.types.pubsub import DeadLetterPolicy +from google.pubsub_v1.types.pubsub import DeleteSnapshotRequest +from google.pubsub_v1.types.pubsub import DeleteSubscriptionRequest +from google.pubsub_v1.types.pubsub import DeleteTopicRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionResponse +from google.pubsub_v1.types.pubsub import ExpirationPolicy +from google.pubsub_v1.types.pubsub import GetSnapshotRequest +from google.pubsub_v1.types.pubsub import GetSubscriptionRequest +from google.pubsub_v1.types.pubsub import GetTopicRequest +from google.pubsub_v1.types.pubsub import IngestionDataSourceSettings +from google.pubsub_v1.types.pubsub import IngestionFailureEvent +from google.pubsub_v1.types.pubsub import JavaScriptUDF +from google.pubsub_v1.types.pubsub import ListSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListSnapshotsResponse +from google.pubsub_v1.types.pubsub import ListSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListSubscriptionsResponse +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsResponse +from google.pubsub_v1.types.pubsub import ListTopicsRequest +from google.pubsub_v1.types.pubsub import ListTopicsResponse +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse +from google.pubsub_v1.types.pubsub import MessageStoragePolicy +from google.pubsub_v1.types.pubsub import MessageTransform +from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest +from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest +from google.pubsub_v1.types.pubsub import PlatformLogsSettings +from google.pubsub_v1.types.pubsub import PublishRequest +from google.pubsub_v1.types.pubsub import PublishResponse +from google.pubsub_v1.types.pubsub import PubsubMessage +from google.pubsub_v1.types.pubsub import PullRequest +from google.pubsub_v1.types.pubsub import PullResponse +from google.pubsub_v1.types.pubsub import PushConfig +from google.pubsub_v1.types.pubsub import ReceivedMessage +from google.pubsub_v1.types.pubsub import RetryPolicy +from google.pubsub_v1.types.pubsub import SchemaSettings +from google.pubsub_v1.types.pubsub import SeekRequest +from google.pubsub_v1.types.pubsub import SeekResponse +from google.pubsub_v1.types.pubsub import Snapshot +from google.pubsub_v1.types.pubsub import StreamingPullRequest +from google.pubsub_v1.types.pubsub import StreamingPullResponse +from google.pubsub_v1.types.pubsub import Subscription +from 
google.pubsub_v1.types.pubsub import Topic +from google.pubsub_v1.types.pubsub import UpdateSnapshotRequest +from google.pubsub_v1.types.pubsub import UpdateSubscriptionRequest +from google.pubsub_v1.types.pubsub import UpdateTopicRequest +from google.pubsub_v1.types.schema import CommitSchemaRequest +from google.pubsub_v1.types.schema import CreateSchemaRequest +from google.pubsub_v1.types.schema import DeleteSchemaRequest +from google.pubsub_v1.types.schema import DeleteSchemaRevisionRequest +from google.pubsub_v1.types.schema import GetSchemaRequest +from google.pubsub_v1.types.schema import ListSchemaRevisionsRequest +from google.pubsub_v1.types.schema import ListSchemaRevisionsResponse +from google.pubsub_v1.types.schema import ListSchemasRequest +from google.pubsub_v1.types.schema import ListSchemasResponse +from google.pubsub_v1.types.schema import RollbackSchemaRequest +from google.pubsub_v1.types.schema import Schema +from google.pubsub_v1.types.schema import ValidateMessageRequest +from google.pubsub_v1.types.schema import ValidateMessageResponse +from google.pubsub_v1.types.schema import ValidateSchemaRequest +from google.pubsub_v1.types.schema import ValidateSchemaResponse +from google.pubsub_v1.types.schema import Encoding +from google.pubsub_v1.types.schema import SchemaView + +__all__ = ( + "PublisherClient", + "PublisherAsyncClient", + "SchemaServiceClient", + "SchemaServiceAsyncClient", + "SubscriberClient", + "SubscriberAsyncClient", + "AcknowledgeRequest", + "BigQueryConfig", + "CloudStorageConfig", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "ExpirationPolicy", + "GetSnapshotRequest", + "GetSubscriptionRequest", + "GetTopicRequest", + "IngestionDataSourceSettings", + "IngestionFailureEvent", + "JavaScriptUDF", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "MessageStoragePolicy", + "MessageTransform", + "ModifyAckDeadlineRequest", + "ModifyPushConfigRequest", + "PlatformLogsSettings", + "PublishRequest", + "PublishResponse", + "PubsubMessage", + "PullRequest", + "PullResponse", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "SchemaSettings", + "SeekRequest", + "SeekResponse", + "Snapshot", + "StreamingPullRequest", + "StreamingPullResponse", + "Subscription", + "Topic", + "UpdateSnapshotRequest", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", + "CommitSchemaRequest", + "CreateSchemaRequest", + "DeleteSchemaRequest", + "DeleteSchemaRevisionRequest", + "GetSchemaRequest", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", + "ListSchemasRequest", + "ListSchemasResponse", + "RollbackSchemaRequest", + "Schema", + "ValidateMessageRequest", + "ValidateMessageResponse", + "ValidateSchemaRequest", + "ValidateSchemaResponse", + "Encoding", + "SchemaView", +) diff --git a/setup.cfg b/google/pubsub/gapic_version.py similarity index 78% rename from setup.cfg rename to google/pubsub/gapic_version.py index c3a2b39f6..b31b170e1 100644 --- a/setup.cfg +++ b/google/pubsub/gapic_version.py @@ -1,19 +1,16 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you 
may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 +# +__version__ = "2.34.0" # {x-release-please-version} diff --git a/google/pubsub/py.typed b/google/pubsub/py.typed new file mode 100644 index 000000000..1cec9a5ba --- /dev/null +++ b/google/pubsub/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-pubsub package uses inline types. diff --git a/google/pubsub_v1/__init__.py b/google/pubsub_v1/__init__.py new file mode 100644 index 000000000..00d6b495e --- /dev/null +++ b/google/pubsub_v1/__init__.py @@ -0,0 +1,276 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.pubsub_v1 import gapic_version as package_version + +import google.api_core as api_core +import sys + +__version__ = package_version.__version__ + +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata + + +from .services.publisher import PublisherClient +from .services.publisher import PublisherAsyncClient +from .services.schema_service import SchemaServiceClient +from .services.schema_service import SchemaServiceAsyncClient +from .services.subscriber import SubscriberClient +from .services.subscriber import SubscriberAsyncClient + +from .types.pubsub import AcknowledgeRequest +from .types.pubsub import BigQueryConfig +from .types.pubsub import CloudStorageConfig +from .types.pubsub import CreateSnapshotRequest +from .types.pubsub import DeadLetterPolicy +from .types.pubsub import DeleteSnapshotRequest +from .types.pubsub import DeleteSubscriptionRequest +from .types.pubsub import DeleteTopicRequest +from .types.pubsub import DetachSubscriptionRequest +from .types.pubsub import DetachSubscriptionResponse +from .types.pubsub import ExpirationPolicy +from .types.pubsub import GetSnapshotRequest +from .types.pubsub import GetSubscriptionRequest +from .types.pubsub import GetTopicRequest +from .types.pubsub import IngestionDataSourceSettings +from .types.pubsub import IngestionFailureEvent +from .types.pubsub import JavaScriptUDF +from .types.pubsub import ListSnapshotsRequest +from .types.pubsub import ListSnapshotsResponse +from .types.pubsub import ListSubscriptionsRequest +from .types.pubsub import ListSubscriptionsResponse +from .types.pubsub import ListTopicSnapshotsRequest +from .types.pubsub 
import ListTopicSnapshotsResponse +from .types.pubsub import ListTopicsRequest +from .types.pubsub import ListTopicsResponse +from .types.pubsub import ListTopicSubscriptionsRequest +from .types.pubsub import ListTopicSubscriptionsResponse +from .types.pubsub import MessageStoragePolicy +from .types.pubsub import MessageTransform +from .types.pubsub import ModifyAckDeadlineRequest +from .types.pubsub import ModifyPushConfigRequest +from .types.pubsub import PlatformLogsSettings +from .types.pubsub import PublishRequest +from .types.pubsub import PublishResponse +from .types.pubsub import PubsubMessage +from .types.pubsub import PullRequest +from .types.pubsub import PullResponse +from .types.pubsub import PushConfig +from .types.pubsub import ReceivedMessage +from .types.pubsub import RetryPolicy +from .types.pubsub import SchemaSettings +from .types.pubsub import SeekRequest +from .types.pubsub import SeekResponse +from .types.pubsub import Snapshot +from .types.pubsub import StreamingPullRequest +from .types.pubsub import StreamingPullResponse +from .types.pubsub import Subscription +from .types.pubsub import Topic +from .types.pubsub import UpdateSnapshotRequest +from .types.pubsub import UpdateSubscriptionRequest +from .types.pubsub import UpdateTopicRequest +from .types.schema import CommitSchemaRequest +from .types.schema import CreateSchemaRequest +from .types.schema import DeleteSchemaRequest +from .types.schema import DeleteSchemaRevisionRequest +from .types.schema import GetSchemaRequest +from .types.schema import ListSchemaRevisionsRequest +from .types.schema import ListSchemaRevisionsResponse +from .types.schema import ListSchemasRequest +from .types.schema import ListSchemasResponse +from .types.schema import RollbackSchemaRequest +from .types.schema import Schema +from .types.schema import ValidateMessageRequest +from .types.schema import ValidateMessageResponse +from .types.schema import ValidateSchemaRequest +from .types.schema import ValidateSchemaResponse +from .types.schema import Encoding +from .types.schema import SchemaView + +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.pubsub_v1") # type: ignore + api_core.check_dependency_versions("google.pubsub_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.pubsub_v1" + if sys.version_info < (3, 9): + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) + if sys.version_info[:2] == (3, 9): + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) + + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. 
+ Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. + break + return tuple(parts) + + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) + except Exception: + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + f"updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) + +__all__ = ( + "PublisherAsyncClient", + "SchemaServiceAsyncClient", + "SubscriberAsyncClient", + "AcknowledgeRequest", + "BigQueryConfig", + "CloudStorageConfig", + "CommitSchemaRequest", + "CreateSchemaRequest", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSchemaRequest", + "DeleteSchemaRevisionRequest", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "Encoding", + "ExpirationPolicy", + "GetSchemaRequest", + "GetSnapshotRequest", + "GetSubscriptionRequest", + "GetTopicRequest", + "IngestionDataSourceSettings", + "IngestionFailureEvent", + "JavaScriptUDF", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", + "ListSchemasRequest", + "ListSchemasResponse", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "MessageStoragePolicy", + "MessageTransform", + "ModifyAckDeadlineRequest", + "ModifyPushConfigRequest", + "PlatformLogsSettings", + "PublishRequest", + "PublishResponse", + "PublisherClient", + "PubsubMessage", + "PullRequest", + "PullResponse", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "RollbackSchemaRequest", + "Schema", + "SchemaServiceClient", + "SchemaSettings", + "SchemaView", + "SeekRequest", + "SeekResponse", + "Snapshot", + "StreamingPullRequest", + "StreamingPullResponse", + "SubscriberClient", + "Subscription", + "Topic", + "UpdateSnapshotRequest", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", + "ValidateMessageRequest", + "ValidateMessageResponse", + "ValidateSchemaRequest", + "ValidateSchemaResponse", +) diff --git a/google/pubsub_v1/gapic_metadata.json b/google/pubsub_v1/gapic_metadata.json new file mode 100644 index 000000000..4a8f51a51 --- /dev/null +++ b/google/pubsub_v1/gapic_metadata.json @@ -0,0 +1,591 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.pubsub_v1", + "protoPackage": "google.pubsub.v1", + "schema": "1.0", + "services": { + "Publisher": { + "clients": { + "grpc": { + "libraryClient": "PublisherClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + "detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, + "ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PublisherAsyncClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + "detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, +
"ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + }, + "rest": { + "libraryClient": "PublisherClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + "detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, + "ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + } + } + }, + "SchemaService": { + "clients": { + "grpc": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CommitSchema": { + "methods": [ + "commit_schema" + ] + }, + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "DeleteSchemaRevision": { + "methods": [ + "delete_schema_revision" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemaRevisions": { + "methods": [ + "list_schema_revisions" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "RollbackSchema": { + "methods": [ + "rollback_schema" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + "validate_schema" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SchemaServiceAsyncClient", + "rpcs": { + "CommitSchema": { + "methods": [ + "commit_schema" + ] + }, + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "DeleteSchemaRevision": { + "methods": [ + "delete_schema_revision" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemaRevisions": { + "methods": [ + "list_schema_revisions" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "RollbackSchema": { + "methods": [ + "rollback_schema" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + "validate_schema" + ] + } + } + }, + "rest": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CommitSchema": { + "methods": [ + "commit_schema" + ] + }, + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "DeleteSchemaRevision": { + "methods": [ + "delete_schema_revision" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemaRevisions": { + "methods": [ + "list_schema_revisions" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "RollbackSchema": { + "methods": [ + "rollback_schema" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + "validate_schema" + ] + } + } + } + } + }, + "Subscriber": { + "clients": { + "grpc": { + "libraryClient": "SubscriberClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + 
"methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" + ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SubscriberAsyncClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" + ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } + }, + "rest": { + "libraryClient": "SubscriberClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" + ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } + } + } + } + } +} diff --git a/google/pubsub_v1/gapic_version.py b/google/pubsub_v1/gapic_version.py new file mode 100644 index 000000000..b31b170e1 --- /dev/null +++ b/google/pubsub_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.34.0" # {x-release-please-version} diff --git a/google/pubsub_v1/py.typed b/google/pubsub_v1/py.typed new file mode 100644 index 000000000..1cec9a5ba --- /dev/null +++ b/google/pubsub_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-pubsub package uses inline types. diff --git a/google/pubsub_v1/services/__init__.py b/google/pubsub_v1/services/__init__.py new file mode 100644 index 000000000..cbf94b283 --- /dev/null +++ b/google/pubsub_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/google/cloud/__init__.py b/google/pubsub_v1/services/publisher/__init__.py similarity index 67% rename from google/cloud/__init__.py rename to google/pubsub_v1/services/publisher/__init__.py index 9a1b64a6d..6c1355801 100644 --- a/google/cloud/__init__.py +++ b/google/pubsub_v1/services/publisher/__init__.py @@ -1,24 +1,22 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# +from .client import PublisherClient +from .async_client import PublisherAsyncClient -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "PublisherClient", + "PublisherAsyncClient", +) diff --git a/google/pubsub_v1/services/publisher/async_client.py b/google/pubsub_v1/services/publisher/async_client.py new file mode 100644 index 000000000..3767a460b --- /dev/null +++ b/google/pubsub_v1/services/publisher/async_client.py @@ -0,0 +1,1674 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.pubsub_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub +from google.pubsub_v1.types import TimeoutType +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport +from .client import PublisherClient + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class PublisherAsyncClient: + """The service that an application uses to manipulate topics, + and to send messages to a topic. + """ + + _client: PublisherClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
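+    # Editor's note (assumption, inferred from standard GAPIC behavior): the + # endpoint template below is formatted with the client's configured + # universe domain (default "googleapis.com") to produce the actual API + # endpoint.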
+ DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PublisherClient._DEFAULT_UNIVERSE + + crypto_key_path = staticmethod(PublisherClient.crypto_key_path) + parse_crypto_key_path = staticmethod(PublisherClient.parse_crypto_key_path) + schema_path = staticmethod(PublisherClient.schema_path) + parse_schema_path = staticmethod(PublisherClient.parse_schema_path) + snapshot_path = staticmethod(PublisherClient.snapshot_path) + parse_snapshot_path = staticmethod(PublisherClient.parse_snapshot_path) + subscription_path = staticmethod(PublisherClient.subscription_path) + parse_subscription_path = staticmethod(PublisherClient.parse_subscription_path) + topic_path = staticmethod(PublisherClient.topic_path) + parse_topic_path = staticmethod(PublisherClient.parse_topic_path) + common_billing_account_path = staticmethod( + PublisherClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PublisherClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(PublisherClient.common_folder_path) + parse_common_folder_path = staticmethod(PublisherClient.parse_common_folder_path) + common_organization_path = staticmethod(PublisherClient.common_organization_path) + parse_common_organization_path = staticmethod( + PublisherClient.parse_common_organization_path + ) + common_project_path = staticmethod(PublisherClient.common_project_path) + parse_common_project_path = staticmethod(PublisherClient.parse_common_project_path) + common_location_path = staticmethod(PublisherClient.common_location_path) + parse_common_location_path = staticmethod( + PublisherClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherAsyncClient: The constructed client. + """ + return PublisherClient.from_service_account_info.__func__(PublisherAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherAsyncClient: The constructed client. + """ + return PublisherClient.from_service_account_file.__func__(PublisherAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PublisherClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PublisherTransport: + """Returns the transport used by the client instance. + + Returns: + PublisherTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = PublisherClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, PublisherTransport, Callable[..., PublisherTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the publisher async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,PublisherTransport,Callable[..., PublisherTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PublisherTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2.
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PublisherClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.PublisherAsyncClient`.", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Publisher", + "credentialsType": None, + }, + ) + + async def create_topic( + self, + request: Optional[Union[pubsub.Topic, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = await client.create_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.Topic, dict]]): + The request object. A topic resource. + name (:class:`str`): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). 
It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.Topic): + request = pubsub.Topic(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_topic( + self, + request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, + *, + topic: Optional[pubsub.Topic] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic by updating the fields + specified in the update mask. Note that certain + properties of a topic are not modifiable. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = await client.update_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.UpdateTopicRequest, dict]]): + The request object. Request for the UpdateTopic method. + topic (:class:`google.pubsub_v1.types.Topic`): + Required. The updated topic object. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Indicates which fields in the provided topic + to update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message_storage_policy" but + the ``message_storage_policy`` is not set in the + ``topic`` provided above, then the updated value is + determined by the policy configured at the project or + organization level. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateTopicRequest): + request = pubsub.UpdateTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
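+        # Per-call `retry` and `timeout` arguments override the defaults + # that the wrapped method carries for this RPC.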
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def publish( + self, + request: Optional[Union[pubsub.PublishRequest, dict]] = None, + *, + topic: Optional[str] = None, + messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_publish(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = await client.publish(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.PublishRequest, dict]]): + The request object. Request for the Publish method. + topic (:class:`str`): + Required. The messages in the request will be published + on this topic. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (:class:`MutableSequence[google.pubsub_v1.types.PubsubMessage]`): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.PublishResponse: + Response for the Publish method. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic, messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.PublishRequest): + request = pubsub.PublishRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
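+        # The scalar `topic` field is assigned directly; `messages` is a + # repeated field, so new entries are appended via extend().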
+ if topic is not None: + request.topic = topic + if messages: + request.messages.extend(messages) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.publish] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_topic( + self, + request: Optional[Union[pubsub.GetTopicRequest, dict]] = None, + *, + topic: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = await client.get_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.GetTopicRequest, dict]]): + The request object. Request for the GetTopic method. + topic (:class:`str`): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
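+        # (The proto-plus request constructor accepts a dict or another + # message and copies its contents into a new request object.)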
+ if not isinstance(request, pubsub.GetTopicRequest): + request = pubsub.GetTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topics( + self, + request: Optional[Union[pubsub.ListTopicsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTopicsAsyncPager: + r"""Lists matching topics. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ListTopicsRequest, dict]]): + The request object. Request for the ``ListTopics`` method. + project (:class:`str`): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager: + Response for the ListTopics method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
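+        # Accepting both would make it ambiguous which value should win, + # so the combination is rejected below.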
+ flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicsRequest): + request = pubsub.ListTopicsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_topics + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_subscriptions( + self, + request: Optional[Union[pubsub.ListTopicSubscriptionsRequest, dict]] = None, + *, + topic: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTopicSubscriptionsAsyncPager: + r"""Lists the names of the attached subscriptions on this + topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]]): + The request object. Request for the ``ListTopicSubscriptions`` method. + topic (:class:`str`): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager: + Response for the ListTopicSubscriptions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): + request = pubsub.ListTopicSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_topic_subscriptions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSubscriptionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_snapshots( + self, + request: Optional[Union[pubsub.ListTopicSnapshotsRequest, dict]] = None, + *, + topic: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTopicSnapshotsAsyncPager: + r"""Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]]): + The request object. Request for the ``ListTopicSnapshots`` method. + topic (:class:`str`): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager: + Response for the ListTopicSnapshots method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicSnapshotsRequest): + request = pubsub.ListTopicSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_topic_snapshots + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSnapshotsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_topic( + self, + request: Optional[Union[pubsub.DeleteTopicRequest, dict]] = None, + *, + topic: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + await client.delete_topic(request=request) + + Args: + request (Optional[Union[google.pubsub_v1.types.DeleteTopicRequest, dict]]): + The request object. Request for the ``DeleteTopic`` method. + topic (:class:`str`): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteTopicRequest): + request = pubsub.DeleteTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_topic + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
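+        # `to_grpc_metadata` renders these pairs into the
+        # `x-goog-request-params` header used for request routing; for example
+        # `(("topic", "projects/p/topics/t"),)` becomes
+        # `x-goog-request-params: topic=projects%2Fp%2Ftopics%2Ft` (values are
+        # URL-encoded).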
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def detach_subscription( + self, + request: Optional[Union[pubsub.DetachSubscriptionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.detach_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]]): + The request object. Request for the DetachSubscription + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DetachSubscriptionRequest): + request = pubsub.DetachSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.detach_subscription + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
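+        # The DetachSubscriptionResponse currently carries no fields; per the
+        # docstring above it is reserved for future use.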
+        return response
+
+    async def set_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: TimeoutType = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the IAM access control policy on the specified resource.
+
+        Replaces any existing policy.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+                The request object. Request message for `SetIamPolicy`
+                method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (TimeoutType):
+                The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
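+        # `gapic_v1.method.DEFAULT` for `retry`/`timeout` is a sentinel: the
+        # wrapped method substitutes the per-RPC defaults configured on the
+        # transport unless the caller supplies explicit values.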
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: TimeoutType = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a resource.
+
+        Returns an empty policy if the resource exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
+                any, should be retried.
+            timeout (TimeoutType):
+                The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
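+        # For the IAM mixin methods the routing key is `resource` (the entity
+        # whose policy is addressed) rather than `topic`.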
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: TimeoutType = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a resource.
+
+        If the resource does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
+                if any, should be retried.
+            timeout (TimeoutType):
+                The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for ``TestIamPermissions`` method.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[
+            self._client._transport.test_iam_permissions
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def __aenter__(self) -> "PublisherAsyncClient":
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        await self.transport.close()
+
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    client_library_version=package_version.__version__
+)
+
+if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
+    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__
+
+
+__all__ = ("PublisherAsyncClient",)
diff --git a/google/pubsub_v1/services/publisher/client.py b/google/pubsub_v1/services/publisher/client.py
new file mode 100644
index 000000000..27ed4dce8
--- /dev/null
+++ b/google/pubsub_v1/services/publisher/client.py
@@ -0,0 +1,2195 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import functools +import os +import re +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.pubsub_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import timeout as timeouts # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub +from google.pubsub_v1.types import TimeoutType + +import grpc +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PublisherGrpcTransport +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport +from .transports.rest import PublisherRestTransport + + +class PublisherClientMeta(type): + """Metaclass for the Publisher client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] + _transport_registry["grpc"] = PublisherGrpcTransport + _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport + _transport_registry["rest"] = PublisherRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[PublisherTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). 
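+        # The registry is an OrderedDict populated above with "grpc" first, so
+        # the implicit default returned here is the synchronous gRPC transport.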
+        return next(iter(cls._transport_registry.values()))
+
+
+class PublisherClient(metaclass=PublisherClientMeta):
+    """The service that an application uses to manipulate topics,
+    and to send messages to a topic.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+
+    # The scopes needed to make gRPC calls to all of the methods defined in
+    # this service
+    _DEFAULT_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/pubsub",
+    )
+
+    SERVICE_ADDRESS = "pubsub.googleapis.com:443"
+    """The default address of the service."""
+
+    DEFAULT_ENDPOINT = "pubsub.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @staticmethod
+    def _use_client_cert_effective():
+        """Returns whether client certificate should be used for mTLS if the
+        google-auth version supports should_use_client_cert automatic mTLS enablement.
+
+        Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var.
+
+        Returns:
+            bool: whether client certificate should be used for mTLS
+        Raises:
+            ValueError: (If using a version of google-auth without should_use_client_cert and
+                GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.)
+        """
+        # check if google-auth version supports should_use_client_cert for automatic mTLS enablement
+        if hasattr(mtls, "should_use_client_cert"):  # pragma: NO COVER
+            return mtls.should_use_client_cert()
+        else:  # pragma: NO COVER
+            # if unsupported, fall back to reading from the env var
+            use_client_cert_str = os.getenv(
+                "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+            ).lower()
+            if use_client_cert_str not in ("true", "false"):
+                raise ValueError(
+                    "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
+                    " either `true` or `false`"
+                )
+            return use_client_cert_str == "true"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PublisherClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            PublisherClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> PublisherTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            PublisherTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def crypto_key_path(
+        project: str,
+        location: str,
+        key_ring: str,
+        crypto_key: str,
+    ) -> str:
+        """Returns a fully-qualified crypto_key string."""
+        return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(
+            project=project,
+            location=location,
+            key_ring=key_ring,
+            crypto_key=crypto_key,
+        )
+
+    @staticmethod
+    def parse_crypto_key_path(path: str) -> Dict[str, str]:
+        """Parses a crypto_key path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/keyRings/(?P<key_ring>.+?)/cryptoKeys/(?P<crypto_key>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def schema_path(
+        project: str,
+        schema: str,
+    ) -> str:
+        """Returns a fully-qualified schema string."""
+        return "projects/{project}/schemas/{schema}".format(
+            project=project,
+            schema=schema,
+        )
+
+    @staticmethod
+    def parse_schema_path(path: str) -> Dict[str, str]:
+        """Parses a schema path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/schemas/(?P<schema>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def snapshot_path(
+        project: str,
+        snapshot: str,
+    ) -> str:
+        """Returns a fully-qualified snapshot string."""
+        return "projects/{project}/snapshots/{snapshot}".format(
+            project=project,
+            snapshot=snapshot,
+        )
+
+    @staticmethod
+    def parse_snapshot_path(path: str) -> Dict[str, str]:
+        """Parses a snapshot path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/snapshots/(?P<snapshot>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def subscription_path(
+        project: str,
+        subscription: str,
+    ) -> str:
+        """Returns a fully-qualified subscription string."""
+        return "projects/{project}/subscriptions/{subscription}".format(
+            project=project,
+            subscription=subscription,
+        )
+
+    @staticmethod
+    def parse_subscription_path(path: str) -> Dict[str, str]:
+        """Parses a subscription path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/subscriptions/(?P<subscription>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def topic_path(
+        project: str,
+        topic: str,
+    ) -> str:
+        """Returns a fully-qualified topic string."""
+        return "projects/{project}/topics/{topic}".format(
+            project=project,
+            topic=topic,
+        )
+
+    @staticmethod
+    def parse_topic_path(path: str) -> Dict[str, str]:
+        """Parses a topic path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/topics/(?P<topic>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Deprecated. Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = PublisherClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = PublisherClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
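+
+        For example (illustrative): with no endpoint override, no client
+        certificate, and the default universe domain, this resolves to
+        ``pubsub.googleapis.com``.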
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PublisherClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PublisherClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = PublisherClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, PublisherTransport, Callable[..., PublisherTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the publisher client. 
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,PublisherTransport,Callable[..., PublisherTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the PublisherTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(
+            client_options_lib.ClientOptions, self._client_options
+        )
+
+        universe_domain_opt = getattr(self._client_options, "universe_domain", None)
+
+        (
+            self._use_client_cert,
+            self._use_mtls_endpoint,
+            self._universe_domain_env,
+        ) = PublisherClient._read_environment_variables()
+        self._client_cert_source = PublisherClient._get_client_cert_source(
+            self._client_options.client_cert_source, self._use_client_cert
+        )
+        self._universe_domain = PublisherClient._get_universe_domain(
+            universe_domain_opt, self._universe_domain_env
+        )
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
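+            # `client_logging` is only present in newer google-api-core
+            # releases; the CLIENT_LOGGING_SUPPORTED flag computed at import
+            # time guards this call.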
+ client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, PublisherTransport) + if transport_provided: + # transport is a PublisherTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(PublisherTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or PublisherClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[PublisherTransport], Callable[..., PublisherTransport] + ] = ( + PublisherClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PublisherTransport], transport) + ) + # initialize with the provided callable or the passed in class + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + transport_init = functools.partial(transport_init, channel=channel) + + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.PublisherClient`.", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Publisher", + "credentialsType": None, + }, + ) + + def create_topic( + self, + request: Optional[Union[pubsub.Topic, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Creates 
the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.Topic, dict]): + The request object. A topic resource. + name (str): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.Topic): + request = pubsub.Topic(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
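+        # This synchronous call blocks until the RPC completes (or its
+        # retry/timeout budget is exhausted); the async client above returns
+        # a coroutine instead.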
+ return response + + def update_topic( + self, + request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, + *, + topic: Optional[pubsub.Topic] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic by updating the fields + specified in the update mask. Note that certain + properties of a topic are not modifiable. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): + The request object. Request for the UpdateTopic method. + topic (google.pubsub_v1.types.Topic): + Required. The updated topic object. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the provided topic + to update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message_storage_policy" but + the ``message_storage_policy`` is not set in the + ``topic`` provided above, then the updated value is + determined by the policy configured at the project or + organization level. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
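+        # A plain dict is also accepted: the proto-plus constructor below
+        # coerces it field-by-field into an UpdateTopicRequest.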
+ if not isinstance(request, pubsub.UpdateTopicRequest): + request = pubsub.UpdateTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def publish( + self, + request: Optional[Union[pubsub.PublishRequest, dict]] = None, + *, + topic: Optional[str] = None, + messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_publish(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = client.publish(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.PublishRequest, dict]): + The request object. Request for the Publish method. + topic (str): + Required. The messages in the request will be published + on this topic. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (MutableSequence[google.pubsub_v1.types.PubsubMessage]): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.PublishResponse: + Response for the Publish method. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
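+        # (Aside: this generated `publish` sends a single PublishRequest as-is;
+        # the handwritten `google.cloud.pubsub_v1.PublisherClient` layers
+        # client-side batching and flow control on top of this RPC.)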
+ flattened_params = [topic, messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.PublishRequest): + request = pubsub.PublishRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if messages is not None: + request.messages = messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.publish] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_topic( + self, + request: Optional[Union[pubsub.GetTopicRequest, dict]] = None, + *, + topic: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): + The request object. Request for the GetTopic method. + topic (str): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.GetTopicRequest): + request = pubsub.GetTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topics( + self, + request: Optional[Union[pubsub.ListTopicsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTopicsPager: + r"""Lists matching topics. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): + The request object. Request for the ``ListTopics`` method. + project (str): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicsPager: + Response for the ListTopics method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicsRequest): + request = pubsub.ListTopicsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topic_subscriptions( + self, + request: Optional[Union[pubsub.ListTopicSubscriptionsRequest, dict]] = None, + *, + topic: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListTopicSubscriptionsPager: + r"""Lists the names of the attached subscriptions on this + topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): + The request object. Request for the ``ListTopicSubscriptions`` method. + topic (str): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. 
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager:
+                Response for the ListTopicSubscriptions method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [topic]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.ListTopicSubscriptionsRequest):
+            request = pubsub.ListTopicSubscriptionsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if topic is not None:
+            request.topic = topic
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_topic_subscriptions]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListTopicSubscriptionsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_topic_snapshots(
+        self,
+        request: Optional[Union[pubsub.ListTopicSnapshotsRequest, dict]] = None,
+        *,
+        topic: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: TimeoutType = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pagers.ListTopicSnapshotsPager:
+        r"""Lists the names of the snapshots on this topic. Snapshots are
+        used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): + The request object. Request for the ``ListTopicSnapshots`` method. + topic (str): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager: + Response for the ListTopicSnapshots method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicSnapshotsRequest): + request = pubsub.ListTopicSnapshotsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topic_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicSnapshotsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_topic( + self, + request: Optional[Union[pubsub.DeleteTopicRequest, dict]] = None, + *, + topic: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + client.delete_topic(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): + The request object. Request for the ``DeleteTopic`` method. + topic (str): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteTopicRequest): + request = pubsub.DeleteTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
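+        # Note: ``DeleteTopic`` returns ``google.protobuf.Empty``, so the
+        # call's result is discarded and this method returns ``None``.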
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def detach_subscription( + self, + request: Optional[Union[pubsub.DetachSubscriptionRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.detach_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): + The request object. Request for the DetachSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DetachSubscriptionRequest): + request = pubsub.DetachSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "PublisherClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
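+
+        Example (a minimal sketch, assuming default credentials are
+        available and the topic exists):
+
+        .. code-block:: python
+
+            from google import pubsub_v1
+
+            with pubsub_v1.PublisherClient() as client:
+                client.get_topic(request={"topic": "projects/my-project/topics/my-topic"})
+            # The transport is closed once the block exits.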
+ """ + self.transport.close() + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+            response = rpc(
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
+    def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: TimeoutType = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (TimeoutType):
+                The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
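+        # For the IAM mixins the routing key is the full ``resource`` name,
+        # e.g. ("resource", "projects/my-project/topics/my-topic"), which lets
+        # the service route the request to the correct resource.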
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: TimeoutType = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (TimeoutType): + The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("PublisherClient",) diff --git a/google/pubsub_v1/services/publisher/pagers.py b/google/pubsub_v1/services/publisher/pagers.py new file mode 100644 index 000000000..162d9da79 --- /dev/null +++ b/google/pubsub_v1/services/publisher/pagers.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Sequence, + Tuple, + Optional, + Iterator, + Union, +) + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.pubsub_v1.types import pubsub + + +class ListTopicsPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicsResponse], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
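+
+        Example (illustrative sketch; pagers are normally obtained from
+        ``PublisherClient.list_topics`` rather than constructed directly):
+
+        .. code-block:: python
+
+            pager = client.list_topics(request={"project": "projects/my-project"})
+            for topic in pager:
+                # Additional pages are fetched lazily as iteration proceeds.
+                print(topic.name)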
+ """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[pubsub.Topic]: + for page in self.pages: + yield from page.topics + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicsAsyncPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicsResponse]], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[pubsub.Topic]: + async def async_generator(): + async for page in self.pages: + for response in page.topics: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicSubscriptionsResponse], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsAsyncPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicSubscriptionsResponse]], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsPager: + """A pager for iterating through ``list_topic_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicSnapshotsResponse], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSnapshotsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSnapshotsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsAsyncPager: + """A pager for iterating through ``list_topic_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicSnapshotsResponse]], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSnapshotsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSnapshotsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/pubsub_v1/services/publisher/transports/README.rst b/google/pubsub_v1/services/publisher/transports/README.rst new file mode 100644 index 000000000..489748f4d --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`PublisherTransport` is the ABC for all transports. +- public child `PublisherGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `PublisherGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BasePublisherRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `PublisherRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/pubsub_v1/services/publisher/transports/__init__.py b/google/pubsub_v1/services/publisher/transports/__init__.py new file mode 100644 index 000000000..75bfa7de0 --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PublisherTransport +from .grpc import PublisherGrpcTransport +from .grpc_asyncio import PublisherGrpcAsyncIOTransport +from .rest import PublisherRestTransport +from .rest import PublisherRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] +_transport_registry["grpc"] = PublisherGrpcTransport +_transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport +_transport_registry["rest"] = PublisherRestTransport + +__all__ = ( + "PublisherTransport", + "PublisherGrpcTransport", + "PublisherGrpcAsyncIOTransport", + "PublisherRestTransport", + "PublisherRestInterceptor", +) diff --git a/google/pubsub_v1/services/publisher/transports/base.py b/google/pubsub_v1/services/publisher/transports/base.py new file mode 100644 index 000000000..b9d6a6279 --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/base.py @@ -0,0 +1,428 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.pubsub_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class PublisherTransport(abc.ABC): + """Abstract transport class for Publisher.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + DEFAULT_HOST: str = "pubsub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+                This argument is mutually exclusive with credentials. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+        """
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+        if not hasattr(self, "_ignore_credentials"):
+            self._ignore_credentials: bool = False
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+        elif credentials is None and not self._ignore_credentials:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+            # Don't apply the audience if the credentials were passed in by the user.
+            if hasattr(credentials, "with_gdch_audience"):
+                credentials = credentials.with_gdch_audience(
+                    api_audience if api_audience else host
+                )
+
+        # If the credentials are service account credentials, then always try to use a self-signed JWT.
+        if (
+            always_use_jwt_access
+            and isinstance(credentials, service_account.Credentials)
+            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+        ):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"
+        self._host = host
+
+    @property
+    def host(self):
+        return self._host
+
+    def _prep_wrapped_methods(self, client_info):
+        # Precompute the wrapped methods.
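+        # Each entry below pairs an RPC with its default retry policy
+        # (exponential backoff: the delay starts at ``initial`` seconds and
+        # grows by ``multiplier`` up to ``maximum``, retrying only the listed
+        # exception types until ``deadline``) and a default timeout. Callers
+        # can override both per call via the ``retry`` and ``timeout``
+        # arguments.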
+ self._wrapped_methods = { + self.create_topic: gapic_v1.method.wrap_method( + self.create_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_topic: gapic_v1.method.wrap_method( + self.update_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.publish: gapic_v1.method.wrap_method( + self.publish, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=4, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.Cancelled, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_topic: gapic_v1.method.wrap_method( + self.get_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topics: gapic_v1.method.wrap_method( + self.list_topics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_subscriptions: gapic_v1.method.wrap_method( + self.list_topic_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_snapshots: gapic_v1.method.wrap_method( + self.list_topic_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_topic: gapic_v1.method.wrap_method( + self.delete_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.detach_subscription: gapic_v1.method.wrap_method( + self.detach_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + 
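+            # Note: the IAM mixin methods are wrapped with no default retry
+            # and no default timeout; pass ``retry``/``timeout`` per call if
+            # needed.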
self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_topic( + self, + ) -> Callable[[pubsub.Topic], Union[pubsub.Topic, Awaitable[pubsub.Topic]]]: + raise NotImplementedError() + + @property + def update_topic( + self, + ) -> Callable[ + [pubsub.UpdateTopicRequest], Union[pubsub.Topic, Awaitable[pubsub.Topic]] + ]: + raise NotImplementedError() + + @property + def publish( + self, + ) -> Callable[ + [pubsub.PublishRequest], + Union[pubsub.PublishResponse, Awaitable[pubsub.PublishResponse]], + ]: + raise NotImplementedError() + + @property + def get_topic( + self, + ) -> Callable[ + [pubsub.GetTopicRequest], Union[pubsub.Topic, Awaitable[pubsub.Topic]] + ]: + raise NotImplementedError() + + @property + def list_topics( + self, + ) -> Callable[ + [pubsub.ListTopicsRequest], + Union[pubsub.ListTopicsResponse, Awaitable[pubsub.ListTopicsResponse]], + ]: + raise NotImplementedError() + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], + Union[ + pubsub.ListTopicSubscriptionsResponse, + Awaitable[pubsub.ListTopicSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_topic_snapshots( + self, + ) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], + Union[ + pubsub.ListTopicSnapshotsResponse, + Awaitable[pubsub.ListTopicSnapshotsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_topic( + self, + ) -> Callable[ + [pubsub.DeleteTopicRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], + Union[ + pubsub.DetachSubscriptionResponse, + Awaitable[pubsub.DetachSubscriptionResponse], + ], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("PublisherTransport",) diff --git a/google/pubsub_v1/services/publisher/transports/grpc.py b/google/pubsub_v1/services/publisher/transports/grpc.py new file mode 100644 index 000000000..e192152d8 --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/grpc.py @@ -0,0 +1,669 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import json
+import logging as std_logging
+import pickle
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers
+from google.api_core import gapic_v1
+import google.auth  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.pubsub_v1.types import pubsub
+from .base import PublisherTransport, DEFAULT_CLIENT_INFO
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.pubsub.v1.Publisher",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = response.trailing_metadata()
+            # Convert the gRPC trailing metadata to a plain dict for logging.
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.pubsub.v1.Publisher",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class PublisherGrpcTransport(PublisherTransport):
+    """gRPC backend transport for Publisher.
+
+    The service that an application uses to manipulate topics,
+    and to send messages to a topic.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "pubsub.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'pubsub.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
+        # This must be done after self._logged_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "pubsub.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with ``credentials`` and will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]:
+        r"""Return a callable for the create topic method over gRPC.
+
+        Creates the given topic with the given name. See the [resource
+        name rules]
+        (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names).
+
+        Returns:
+            Callable[[~.Topic],
+                    ~.Topic]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_topic" not in self._stubs:
+            self._stubs["create_topic"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/CreateTopic",
+                request_serializer=pubsub.Topic.serialize,
+                response_deserializer=pubsub.Topic.deserialize,
+            )
+        return self._stubs["create_topic"]
+
+    @property
+    def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]:
+        r"""Return a callable for the update topic method over gRPC.
+
+        Updates an existing topic by updating the fields
+        specified in the update mask. Note that certain
+        properties of a topic are not modifiable.
+
+        Returns:
+            Callable[[~.UpdateTopicRequest],
+                    ~.Topic]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
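+        # Stubs are cached in self._stubs, so repeated accesses of this
+        # property reuse a single multicallable on the intercepted channel.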
+ if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/UpdateTopic", + request_serializer=pubsub.UpdateTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def publish(self) -> Callable[[pubsub.PublishRequest], pubsub.PublishResponse]: + r"""Return a callable for the publish method over gRPC. + + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Returns: + Callable[[~.PublishRequest], + ~.PublishResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "publish" not in self._stubs: + self._stubs["publish"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/Publish", + request_serializer=pubsub.PublishRequest.serialize, + response_deserializer=pubsub.PublishResponse.deserialize, + ) + return self._stubs["publish"] + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], pubsub.Topic]: + r"""Return a callable for the get topic method over gRPC. + + Gets the configuration of a topic. + + Returns: + Callable[[~.GetTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_topic" not in self._stubs: + self._stubs["get_topic"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/GetTopic", + request_serializer=pubsub.GetTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["get_topic"] + + @property + def list_topics( + self, + ) -> Callable[[pubsub.ListTopicsRequest], pubsub.ListTopicsResponse]: + r"""Return a callable for the list topics method over gRPC. + + Lists matching topics. + + Returns: + Callable[[~.ListTopicsRequest], + ~.ListTopicsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topics" not in self._stubs: + self._stubs["list_topics"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopics", + request_serializer=pubsub.ListTopicsRequest.serialize, + response_deserializer=pubsub.ListTopicsResponse.deserialize, + ) + return self._stubs["list_topics"] + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], pubsub.ListTopicSubscriptionsResponse + ]: + r"""Return a callable for the list topic subscriptions method over gRPC. + + Lists the names of the attached subscriptions on this + topic. + + Returns: + Callable[[~.ListTopicSubscriptionsRequest], + ~.ListTopicSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
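+        # Note: the transport returns raw response pages; walking across
+        # ``next_page_token`` pages is left to the calling client layer.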
+        if "list_topic_subscriptions" not in self._stubs:
+            self._stubs["list_topic_subscriptions"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/ListTopicSubscriptions",
+                request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize,
+                response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize,
+            )
+        return self._stubs["list_topic_subscriptions"]
+
+    @property
+    def list_topic_snapshots(
+        self,
+    ) -> Callable[
+        [pubsub.ListTopicSnapshotsRequest], pubsub.ListTopicSnapshotsResponse
+    ]:
+        r"""Return a callable for the list topic snapshots method over gRPC.
+
+        Lists the names of the snapshots on this topic. Snapshots are
+        used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        Returns:
+            Callable[[~.ListTopicSnapshotsRequest],
+                    ~.ListTopicSnapshotsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_topic_snapshots" not in self._stubs:
+            self._stubs["list_topic_snapshots"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/ListTopicSnapshots",
+                request_serializer=pubsub.ListTopicSnapshotsRequest.serialize,
+                response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize,
+            )
+        return self._stubs["list_topic_snapshots"]
+
+    @property
+    def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty_pb2.Empty]:
+        r"""Return a callable for the delete topic method over gRPC.
+
+        Deletes the topic with the given name. Returns ``NOT_FOUND`` if
+        the topic does not exist. After a topic is deleted, a new topic
+        may be created with the same name; this is an entirely new topic
+        with none of the old configuration or subscriptions. Existing
+        subscriptions to this topic are not deleted, but their ``topic``
+        field is set to ``_deleted-topic_``.
+
+        Returns:
+            Callable[[~.DeleteTopicRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "delete_topic" not in self._stubs:
+            self._stubs["delete_topic"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/DeleteTopic",
+                request_serializer=pubsub.DeleteTopicRequest.serialize,
+                response_deserializer=empty_pb2.Empty.FromString,
+            )
+        return self._stubs["delete_topic"]
+
+    @property
+    def detach_subscription(
+        self,
+    ) -> Callable[
+        [pubsub.DetachSubscriptionRequest], pubsub.DetachSubscriptionResponse
+    ]:
+        r"""Return a callable for the detach subscription method over gRPC.
+
+        Detaches a subscription from this topic. All messages retained
+        in the subscription are dropped. Subsequent ``Pull`` and
+        ``StreamingPull`` requests will return FAILED_PRECONDITION. If
+        the subscription is a push subscription, pushes to the endpoint
+        will stop.
+
+        Returns:
+            Callable[[~.DetachSubscriptionRequest],
+                    ~.DetachSubscriptionResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "detach_subscription" not in self._stubs:
+            self._stubs["detach_subscription"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/DetachSubscription",
+                request_serializer=pubsub.DetachSubscriptionRequest.serialize,
+                response_deserializer=pubsub.DetachSubscriptionResponse.deserialize,
+            )
+        return self._stubs["detach_subscription"]
+
+    def close(self):
+        self._logged_channel.close()
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
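+        # The IAM methods are served by the ``google.iam.v1.IAMPolicy`` mixin
+        # service rather than Publisher, so they use the raw protobuf
+        # SerializeToString/FromString codecs instead of the proto-plus
+        # serialize/deserialize used above.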
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("PublisherGrpcTransport",) diff --git a/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py new file mode 100644 index 000000000..14b9fdd06 --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -0,0 +1,849 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub +from .base import PublisherTransport, DEFAULT_CLIENT_INFO +from .grpc import PublisherGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + 
_LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.pubsub.v1.Publisher",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert the gRPC trailing metadata to a plain dict for logging.
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.pubsub.v1.Publisher",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class PublisherGrpcAsyncIOTransport(PublisherTransport):
+    """gRPC AsyncIO backend transport for Publisher.
+
+    The service that an application uses to manipulate topics,
+    and to send messages to a topic.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _grpc_channel: aio.Channel
+    _stubs: Dict[str, Callable] = {}
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "pubsub.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> aio.Channel:
+        """Create and return a gRPC AsyncIO channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be
+                removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
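+
+        Example (an illustrative sketch, not part of the generated surface;
+        assumes Application Default Credentials are available)::
+
+            channel = PublisherGrpcAsyncIOTransport.create_channel(
+                "pubsub.googleapis.com",
+            )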
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs,
+        )
+
+    def __init__(
+        self,
+        *,
+        host: str = "pubsub.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to (default: 'pubsub.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests.
+                If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials, and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+        if not self._grpc_channel:
+            # Initialize with the provided callable or the default channel.
+            channel_init = channel or type(self).create_channel
+            self._grpc_channel = channel_init(
+                self._host,
+                # Use the credentials which are saved.
+                credentials=self._credentials,
+                # Set ``credentials_file`` to ``None`` here as
+                # the credentials that we saved earlier should be used.
+                credentials_file=None,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                    ("grpc.max_metadata_size", 4 * 1024 * 1024),
+                    ("grpc.keepalive_time_ms", 30000),
+                ],
+            )
+
+        self._interceptor = _LoggingClientAIOInterceptor()
+        self._grpc_channel._unary_unary_interceptors.append(self._interceptor)
+        self._logged_channel = self._grpc_channel
+        self._wrap_with_kind = (
+            "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters
+        )
+        # Wrap messages. This must be done after self._logged_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def grpc_channel(self) -> aio.Channel:
+        """Create the channel designed to connect to this service.
+
+        This property caches on the instance; repeated calls return
+        the same channel.
+        """
+        # Return the channel from cache.
+        return self._grpc_channel
+
+    @property
+    def create_topic(self) -> Callable[[pubsub.Topic], Awaitable[pubsub.Topic]]:
+        r"""Return a callable for the create topic method over gRPC.
+ + Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + + Returns: + Callable[[~.Topic], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_topic" not in self._stubs: + self._stubs["create_topic"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/CreateTopic", + request_serializer=pubsub.Topic.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["create_topic"] + + @property + def update_topic( + self, + ) -> Callable[[pubsub.UpdateTopicRequest], Awaitable[pubsub.Topic]]: + r"""Return a callable for the update topic method over gRPC. + + Updates an existing topic by updating the fields + specified in the update mask. Note that certain + properties of a topic are not modifiable. + + Returns: + Callable[[~.UpdateTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/UpdateTopic", + request_serializer=pubsub.UpdateTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def publish( + self, + ) -> Callable[[pubsub.PublishRequest], Awaitable[pubsub.PublishResponse]]: + r"""Return a callable for the publish method over gRPC. + + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Returns: + Callable[[~.PublishRequest], + Awaitable[~.PublishResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "publish" not in self._stubs: + self._stubs["publish"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/Publish", + request_serializer=pubsub.PublishRequest.serialize, + response_deserializer=pubsub.PublishResponse.deserialize, + ) + return self._stubs["publish"] + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], Awaitable[pubsub.Topic]]: + r"""Return a callable for the get topic method over gRPC. + + Gets the configuration of a topic. + + Returns: + Callable[[~.GetTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
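+        # On the aio channel the returned multicallable produces awaitable
+        # calls, matching the ``Awaitable[...]`` return types declared above.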
+        if "get_topic" not in self._stubs:
+            self._stubs["get_topic"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/GetTopic",
+                request_serializer=pubsub.GetTopicRequest.serialize,
+                response_deserializer=pubsub.Topic.deserialize,
+            )
+        return self._stubs["get_topic"]
+
+    @property
+    def list_topics(
+        self,
+    ) -> Callable[[pubsub.ListTopicsRequest], Awaitable[pubsub.ListTopicsResponse]]:
+        r"""Return a callable for the list topics method over gRPC.
+
+        Lists matching topics.
+
+        Returns:
+            Callable[[~.ListTopicsRequest],
+                    Awaitable[~.ListTopicsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_topics" not in self._stubs:
+            self._stubs["list_topics"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/ListTopics",
+                request_serializer=pubsub.ListTopicsRequest.serialize,
+                response_deserializer=pubsub.ListTopicsResponse.deserialize,
+            )
+        return self._stubs["list_topics"]
+
+    @property
+    def list_topic_subscriptions(
+        self,
+    ) -> Callable[
+        [pubsub.ListTopicSubscriptionsRequest],
+        Awaitable[pubsub.ListTopicSubscriptionsResponse],
+    ]:
+        r"""Return a callable for the list topic subscriptions method over gRPC.
+
+        Lists the names of the attached subscriptions on this
+        topic.
+
+        Returns:
+            Callable[[~.ListTopicSubscriptionsRequest],
+                    Awaitable[~.ListTopicSubscriptionsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_topic_subscriptions" not in self._stubs:
+            self._stubs["list_topic_subscriptions"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Publisher/ListTopicSubscriptions",
+                request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize,
+                response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize,
+            )
+        return self._stubs["list_topic_subscriptions"]
+
+    @property
+    def list_topic_snapshots(
+        self,
+    ) -> Callable[
+        [pubsub.ListTopicSnapshotsRequest], Awaitable[pubsub.ListTopicSnapshotsResponse]
+    ]:
+        r"""Return a callable for the list topic snapshots method over gRPC.
+
+        Lists the names of the snapshots on this topic. Snapshots are
+        used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        Returns:
+            Callable[[~.ListTopicSnapshotsRequest],
+                    Awaitable[~.ListTopicSnapshotsResponse]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "list_topic_snapshots" not in self._stubs: + self._stubs["list_topic_snapshots"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSnapshots", + request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, + response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, + ) + return self._stubs["list_topic_snapshots"] + + @property + def delete_topic( + self, + ) -> Callable[[pubsub.DeleteTopicRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Returns: + Callable[[~.DeleteTopicRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_topic" not in self._stubs: + self._stubs["delete_topic"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/DeleteTopic", + request_serializer=pubsub.DeleteTopicRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_topic"] + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], Awaitable[pubsub.DetachSubscriptionResponse] + ]: + r"""Return a callable for the detach subscription method over gRPC. + + Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Returns: + Callable[[~.DetachSubscriptionRequest], + Awaitable[~.DetachSubscriptionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "detach_subscription" not in self._stubs: + self._stubs["detach_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Publisher/DetachSubscription", + request_serializer=pubsub.DetachSubscriptionRequest.serialize, + response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, + ) + return self._stubs["detach_subscription"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_topic: self._wrap_method( + self.create_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_topic: self._wrap_method( + self.update_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.publish: self._wrap_method( + self.publish, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=4, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.Cancelled, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_topic: self._wrap_method( + self.get_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topics: self._wrap_method( + self.list_topics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_subscriptions: self._wrap_method( + self.list_topic_subscriptions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_snapshots: self._wrap_method( + self.list_topic_snapshots, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_topic: self._wrap_method( + self.delete_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.detach_subscription: self._wrap_method( + self.detach_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + 
multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_iam_policy: self._wrap_method(
+                self.get_iam_policy,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.set_iam_policy: self._wrap_method(
+                self.set_iam_policy,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.test_iam_permissions: self._wrap_method(
+                self.test_iam_permissions,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        return self._logged_channel.close()
+
+    @property
+    def kind(self) -> str:
+        return "grpc_asyncio"
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PublisherGrpcAsyncIOTransport",) diff --git a/google/pubsub_v1/services/publisher/transports/rest.py b/google/pubsub_v1/services/publisher/transports/rest.py new file mode 100644 index 000000000..aeb07184c --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/rest.py @@ -0,0 +1,2512 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + + +from .rest_base import _BasePublisherRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class PublisherRestInterceptor: + """Interceptor for Publisher. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
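+
+    Each RPC has a ``pre_*`` hook, which runs before the request is sent,
+    and, for RPCs that return a response body, a ``post_*`` hook, which
+    runs before the response is handed back to user code; the default
+    implementations below are simple pass-throughs.
+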
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the PublisherRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomPublisherInterceptor(PublisherRestInterceptor):
+            def pre_create_topic(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_topic(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_topic(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_detach_subscription(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_detach_subscription(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_topic(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_topic(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_topics(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_topics(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_topic_snapshots(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_topic_snapshots(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_topic_subscriptions(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_topic_subscriptions(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_publish(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_publish(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_topic(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_topic(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = PublisherRestTransport(interceptor=MyCustomPublisherInterceptor())
+        client = PublisherClient(transport=transport)
+
+
+    """
+
+    def pre_create_topic(
+        self, request: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]]
+    ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for create_topic
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the Publisher server.
+        """
+        return request, metadata
+
+    def post_create_topic(self, response: pubsub.Topic) -> pubsub.Topic:
+        """Post-rpc interceptor for create_topic
+
+        DEPRECATED. Please use the `post_create_topic_with_metadata`
+        interceptor instead.
+
+        Override in a subclass to read or manipulate the response
+        after it is returned by the Publisher server but before
+        it is returned to user code. This `post_create_topic` interceptor runs
+        before the `post_create_topic_with_metadata` interceptor.
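As a concrete variant of the docstring example above, the sketch below wires a custom interceptor into a client and appends one metadata entry per create_topic call; the project, topic name, and header key are made up for illustration:

.. code-block:: python

    from google.pubsub_v1 import PublisherClient
    from google.pubsub_v1.services.publisher.transports.rest import (
        PublisherRestInterceptor,
        PublisherRestTransport,
    )

    class StampingInterceptor(PublisherRestInterceptor):
        def pre_create_topic(self, request, metadata):
            # Metadata entries are (key, value) tuples; str values here.
            return request, tuple(metadata) + (("x-example-origin", "demo"),)

    client = PublisherClient(
        transport=PublisherRestTransport(interceptor=StampingInterceptor())
    )
    topic = client.create_topic(name="projects/my-project/topics/my-topic")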
+ """ + return response + + def post_create_topic_with_metadata( + self, response: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_create_topic_with_metadata` + interceptor in new development instead of the `post_create_topic` interceptor. + When both interceptors are used, this `post_create_topic_with_metadata` interceptor runs after the + `post_create_topic` interceptor. The (possibly modified) response returned by + `post_create_topic` will be passed to + `post_create_topic_with_metadata`. + """ + return response, metadata + + def pre_delete_topic( + self, + request: pubsub.DeleteTopicRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.DeleteTopicRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def pre_detach_subscription( + self, + request: pubsub.DetachSubscriptionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.DetachSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for detach_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_detach_subscription( + self, response: pubsub.DetachSubscriptionResponse + ) -> pubsub.DetachSubscriptionResponse: + """Post-rpc interceptor for detach_subscription + + DEPRECATED. Please use the `post_detach_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. This `post_detach_subscription` interceptor runs + before the `post_detach_subscription_with_metadata` interceptor. + """ + return response + + def post_detach_subscription_with_metadata( + self, + response: pubsub.DetachSubscriptionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.DetachSubscriptionResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for detach_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_detach_subscription_with_metadata` + interceptor in new development instead of the `post_detach_subscription` interceptor. + When both interceptors are used, this `post_detach_subscription_with_metadata` interceptor runs after the + `post_detach_subscription` interceptor. The (possibly modified) response returned by + `post_detach_subscription` will be passed to + `post_detach_subscription_with_metadata`. + """ + return response, metadata + + def pre_get_topic( + self, + request: pubsub.GetTopicRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.GetTopicRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. 
+ """ + return request, metadata + + def post_get_topic(self, response: pubsub.Topic) -> pubsub.Topic: + """Post-rpc interceptor for get_topic + + DEPRECATED. Please use the `post_get_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. This `post_get_topic` interceptor runs + before the `post_get_topic_with_metadata` interceptor. + """ + return response + + def post_get_topic_with_metadata( + self, response: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_get_topic_with_metadata` + interceptor in new development instead of the `post_get_topic` interceptor. + When both interceptors are used, this `post_get_topic_with_metadata` interceptor runs after the + `post_get_topic` interceptor. The (possibly modified) response returned by + `post_get_topic` will be passed to + `post_get_topic_with_metadata`. + """ + return response, metadata + + def pre_list_topics( + self, + request: pubsub.ListTopicsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListTopicsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_topics + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_list_topics( + self, response: pubsub.ListTopicsResponse + ) -> pubsub.ListTopicsResponse: + """Post-rpc interceptor for list_topics + + DEPRECATED. Please use the `post_list_topics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. This `post_list_topics` interceptor runs + before the `post_list_topics_with_metadata` interceptor. + """ + return response + + def post_list_topics_with_metadata( + self, + response: pubsub.ListTopicsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListTopicsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_topics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_list_topics_with_metadata` + interceptor in new development instead of the `post_list_topics` interceptor. + When both interceptors are used, this `post_list_topics_with_metadata` interceptor runs after the + `post_list_topics` interceptor. The (possibly modified) response returned by + `post_list_topics` will be passed to + `post_list_topics_with_metadata`. + """ + return response, metadata + + def pre_list_topic_snapshots( + self, + request: pubsub.ListTopicSnapshotsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSnapshotsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_topic_snapshots + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. 
+ """ + return request, metadata + + def post_list_topic_snapshots( + self, response: pubsub.ListTopicSnapshotsResponse + ) -> pubsub.ListTopicSnapshotsResponse: + """Post-rpc interceptor for list_topic_snapshots + + DEPRECATED. Please use the `post_list_topic_snapshots_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. This `post_list_topic_snapshots` interceptor runs + before the `post_list_topic_snapshots_with_metadata` interceptor. + """ + return response + + def post_list_topic_snapshots_with_metadata( + self, + response: pubsub.ListTopicSnapshotsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSnapshotsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_topic_snapshots + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_list_topic_snapshots_with_metadata` + interceptor in new development instead of the `post_list_topic_snapshots` interceptor. + When both interceptors are used, this `post_list_topic_snapshots_with_metadata` interceptor runs after the + `post_list_topic_snapshots` interceptor. The (possibly modified) response returned by + `post_list_topic_snapshots` will be passed to + `post_list_topic_snapshots_with_metadata`. + """ + return response, metadata + + def pre_list_topic_subscriptions( + self, + request: pubsub.ListTopicSubscriptionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSubscriptionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_topic_subscriptions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_list_topic_subscriptions( + self, response: pubsub.ListTopicSubscriptionsResponse + ) -> pubsub.ListTopicSubscriptionsResponse: + """Post-rpc interceptor for list_topic_subscriptions + + DEPRECATED. Please use the `post_list_topic_subscriptions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. This `post_list_topic_subscriptions` interceptor runs + before the `post_list_topic_subscriptions_with_metadata` interceptor. + """ + return response + + def post_list_topic_subscriptions_with_metadata( + self, + response: pubsub.ListTopicSubscriptionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSubscriptionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_topic_subscriptions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_list_topic_subscriptions_with_metadata` + interceptor in new development instead of the `post_list_topic_subscriptions` interceptor. + When both interceptors are used, this `post_list_topic_subscriptions_with_metadata` interceptor runs after the + `post_list_topic_subscriptions` interceptor. 
The (possibly modified) response returned by + `post_list_topic_subscriptions` will be passed to + `post_list_topic_subscriptions_with_metadata`. + """ + return response, metadata + + def pre_publish( + self, + request: pubsub.PublishRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PublishRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for publish + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_publish(self, response: pubsub.PublishResponse) -> pubsub.PublishResponse: + """Post-rpc interceptor for publish + + DEPRECATED. Please use the `post_publish_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. This `post_publish` interceptor runs + before the `post_publish_with_metadata` interceptor. + """ + return response + + def post_publish_with_metadata( + self, + response: pubsub.PublishResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PublishResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for publish + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_publish_with_metadata` + interceptor in new development instead of the `post_publish` interceptor. + When both interceptors are used, this `post_publish_with_metadata` interceptor runs after the + `post_publish` interceptor. The (possibly modified) response returned by + `post_publish` will be passed to + `post_publish_with_metadata`. + """ + return response, metadata + + def pre_update_topic( + self, + request: pubsub.UpdateTopicRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.UpdateTopicRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_update_topic(self, response: pubsub.Topic) -> pubsub.Topic: + """Post-rpc interceptor for update_topic + + DEPRECATED. Please use the `post_update_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. This `post_update_topic` interceptor runs + before the `post_update_topic_with_metadata` interceptor. + """ + return response + + def post_update_topic_with_metadata( + self, response: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_update_topic_with_metadata` + interceptor in new development instead of the `post_update_topic` interceptor. + When both interceptors are used, this `post_update_topic_with_metadata` interceptor runs after the + `post_update_topic` interceptor. The (possibly modified) response returned by + `post_update_topic` will be passed to + `post_update_topic_with_metadata`. 
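A hedged sketch of the publish hooks defined above: count outgoing messages before the request is sent and read the server-assigned IDs afterwards (the logging calls are illustrative):

.. code-block:: python

    import logging

    class PublishAudit(PublisherRestInterceptor):
        def pre_publish(self, request, metadata):
            logging.info("publishing %d message(s) to %s",
                         len(request.messages), request.topic)
            return request, metadata

        def post_publish_with_metadata(self, response, metadata):
            logging.info("server assigned ids: %s", list(response.message_ids))
            return response, metadata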
+ """ + return response, metadata + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PublisherRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PublisherRestInterceptor + + +class PublisherRestTransport(_BasePublisherRestTransport): + """REST backend synchronous transport for Publisher. + + The service that an application uses to manipulate topics, + and to send messages to a topic. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PublisherRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PublisherRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateTopic(_BasePublisherRestTransport._BaseCreateTopic, PublisherRestStub): + def __hash__(self): + return hash("PublisherRestTransport.CreateTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.Topic, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Call the create topic method over HTTP. + + Args: + request (~.pubsub.Topic): + The request object. A topic resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + + http_options = ( + _BasePublisherRestTransport._BaseCreateTopic._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_topic(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BaseCreateTopic._get_transcoded_request( + http_options, request + ) + ) + + body = _BasePublisherRestTransport._BaseCreateTopic._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BaseCreateTopic._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.CreateTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "CreateTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._CreateTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Topic() + pb_resp = pubsub.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_topic_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Topic.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.create_topic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "CreateTopic", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteTopic(_BasePublisherRestTransport._BaseDeleteTopic, PublisherRestStub): + def __hash__(self): + return hash("PublisherRestTransport.DeleteTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.DeleteTopicRequest, + 
*, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete topic method over HTTP. + + Args: + request (~.pubsub.DeleteTopicRequest): + The request object. Request for the ``DeleteTopic`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BasePublisherRestTransport._BaseDeleteTopic._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_topic(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BaseDeleteTopic._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BaseDeleteTopic._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.DeleteTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "DeleteTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._DeleteTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DetachSubscription( + _BasePublisherRestTransport._BaseDetachSubscription, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.DetachSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.DetachSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Call the detach subscription method over HTTP. + + Args: + request (~.pubsub.DetachSubscriptionRequest): + The request object. Request for the DetachSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + + http_options = ( + _BasePublisherRestTransport._BaseDetachSubscription._get_http_options() + ) + + request, metadata = self._interceptor.pre_detach_subscription( + request, metadata + ) + transcoded_request = _BasePublisherRestTransport._BaseDetachSubscription._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePublisherRestTransport._BaseDetachSubscription._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.DetachSubscription", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "DetachSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._DetachSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.DetachSubscriptionResponse() + pb_resp = pubsub.DetachSubscriptionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_detach_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detach_subscription_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.DetachSubscriptionResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.detach_subscription", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "DetachSubscription", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetTopic(_BasePublisherRestTransport._BaseGetTopic, PublisherRestStub): + def __hash__(self): + return hash("PublisherRestTransport.GetTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.GetTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Call the get topic method over HTTP. + + Args: + request (~.pubsub.GetTopicRequest): + The request object. Request for the GetTopic method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Topic: + A topic resource. 
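A self-contained sketch of the decode step each of these `__call__` methods performs: the HTTP body is parsed into the proto-plus wrapper through its raw protobuf message (the JSON literal here is made up):

.. code-block:: python

    from google.protobuf import json_format
    from google.pubsub_v1.types import pubsub

    resp = pubsub.Topic()
    json_format.Parse(
        '{"name": "projects/p/topics/t"}',
        pubsub.Topic.pb(resp),       # the underlying protobuf message
        ignore_unknown_fields=True,  # tolerate fields the client predates
    )
    assert resp.name == "projects/p/topics/t"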
+ """ + + http_options = _BasePublisherRestTransport._BaseGetTopic._get_http_options() + + request, metadata = self._interceptor.pre_get_topic(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BaseGetTopic._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BaseGetTopic._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.GetTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._GetTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Topic() + pb_resp = pubsub.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_topic_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Topic.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.get_topic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetTopic", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListTopics(_BasePublisherRestTransport._BaseListTopics, PublisherRestStub): + def __hash__(self): + return hash("PublisherRestTransport.ListTopics") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.ListTopicsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.ListTopicsResponse: + r"""Call the list topics method over HTTP. + + Args: + request (~.pubsub.ListTopicsRequest): + The request object. Request for the ``ListTopics`` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.ListTopicsResponse: + Response for the ``ListTopics`` method. + """ + + http_options = ( + _BasePublisherRestTransport._BaseListTopics._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_topics(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BaseListTopics._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BaseListTopics._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.ListTopics", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopics", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._ListTopics._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListTopicsResponse() + pb_resp = pubsub.ListTopicsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_topics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_topics_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListTopicsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.list_topics", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopics", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListTopicSnapshots( + _BasePublisherRestTransport._BaseListTopicSnapshots, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.ListTopicSnapshots") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.ListTopicSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.ListTopicSnapshotsResponse: + r"""Call the list topic snapshots method over HTTP. + + Args: + request (~.pubsub.ListTopicSnapshotsRequest): + The request object. Request for the ``ListTopicSnapshots`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.ListTopicSnapshotsResponse: + Response for the ``ListTopicSnapshots`` method. 
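The list RPCs in this transport return one page per call; the higher-level client wraps them in pagers, but a manual page loop over the raw transport callable looks roughly like this sketch (project ID made up; `client` assumed to be a constructed PublisherClient):

.. code-block:: python

    from google.pubsub_v1.types import pubsub

    request = pubsub.ListTopicsRequest(project="projects/my-project", page_size=100)
    topics = []
    while True:
        response = client.transport.list_topics(request)  # one page per call
        topics.extend(response.topics)
        if not response.next_page_token:
            break
        request.page_token = response.next_page_token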
+ """ + + http_options = ( + _BasePublisherRestTransport._BaseListTopicSnapshots._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_topic_snapshots( + request, metadata + ) + transcoded_request = _BasePublisherRestTransport._BaseListTopicSnapshots._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePublisherRestTransport._BaseListTopicSnapshots._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.ListTopicSnapshots", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSnapshots", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._ListTopicSnapshots._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListTopicSnapshotsResponse() + pb_resp = pubsub.ListTopicSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_topic_snapshots(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_topic_snapshots_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListTopicSnapshotsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.list_topic_snapshots", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSnapshots", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListTopicSubscriptions( + _BasePublisherRestTransport._BaseListTopicSubscriptions, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.ListTopicSubscriptions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.ListTopicSubscriptionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), + ) -> pubsub.ListTopicSubscriptionsResponse: + r"""Call the list topic subscriptions method over HTTP. + + Args: + request (~.pubsub.ListTopicSubscriptionsRequest): + The request object. Request for the ``ListTopicSubscriptions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.ListTopicSubscriptionsResponse: + Response for the ``ListTopicSubscriptions`` method. + """ + + http_options = ( + _BasePublisherRestTransport._BaseListTopicSubscriptions._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_topic_subscriptions( + request, metadata + ) + transcoded_request = _BasePublisherRestTransport._BaseListTopicSubscriptions._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BasePublisherRestTransport._BaseListTopicSubscriptions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.ListTopicSubscriptions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSubscriptions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._ListTopicSubscriptions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListTopicSubscriptionsResponse() + pb_resp = pubsub.ListTopicSubscriptionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_topic_subscriptions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_topic_subscriptions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListTopicSubscriptionsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.list_topic_subscriptions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSubscriptions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Publish(_BasePublisherRestTransport._BasePublish, PublisherRestStub): + def __hash__(self): + return hash("PublisherRestTransport.Publish") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.PublishRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.PublishResponse: + r"""Call the publish method over HTTP. + + Args: + request (~.pubsub.PublishRequest): + The request object. Request for the Publish method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.PublishResponse: + Response for the ``Publish`` method. 
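Because REST sends the JSON form of the proto, `bytes` fields such as PubsubMessage.data are base64-encoded in the request body built by `_get_request_body_json`; a runnable sketch of that encoding (topic name made up):

.. code-block:: python

    from google.protobuf import json_format
    from google.pubsub_v1.types import pubsub

    req = pubsub.PublishRequest(
        topic="projects/p/topics/t",
        messages=[pubsub.PubsubMessage(data=b"hello")],
    )
    body = json_format.MessageToJson(pubsub.PublishRequest.pb(req))
    # bytes fields appear base64-encoded in the JSON body: "data": "aGVsbG8="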
+ """ + + http_options = _BasePublisherRestTransport._BasePublish._get_http_options() + + request, metadata = self._interceptor.pre_publish(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BasePublish._get_transcoded_request( + http_options, request + ) + ) + + body = _BasePublisherRestTransport._BasePublish._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BasePublish._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.Publish", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "Publish", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._Publish._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.PublishResponse() + pb_resp = pubsub.PublishResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_publish(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_publish_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.PublishResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.publish", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "Publish", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateTopic(_BasePublisherRestTransport._BaseUpdateTopic, PublisherRestStub): + def __hash__(self): + return hash("PublisherRestTransport.UpdateTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.UpdateTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Topic: + r"""Call the update topic method over HTTP. 
+ + Args: + request (~.pubsub.UpdateTopicRequest): + The request object. Request for the UpdateTopic method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + + http_options = ( + _BasePublisherRestTransport._BaseUpdateTopic._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_topic(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BaseUpdateTopic._get_transcoded_request( + http_options, request + ) + ) + + body = _BasePublisherRestTransport._BaseUpdateTopic._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BaseUpdateTopic._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.UpdateTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "UpdateTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._UpdateTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Topic() + pb_resp = pubsub.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_topic_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Topic.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.update_topic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "UpdateTopic", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], pubsub.DetachSubscriptionResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DetachSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], pubsub.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_topics( + self, + ) -> Callable[[pubsub.ListTopicsRequest], pubsub.ListTopicsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTopics(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_topic_snapshots( + self, + ) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], pubsub.ListTopicSnapshotsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTopicSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], pubsub.ListTopicSubscriptionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTopicSubscriptions(self._session, self._host, self._interceptor) # type: ignore + + @property + def publish(self) -> Callable[[pubsub.PublishRequest], pubsub.PublishResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Publish(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BasePublisherRestTransport._BaseGetIamPolicy, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BasePublisherRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
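+            # (google.api_core.exceptions.from_http_response maps the HTTP status
+            # to a typed exception, e.g. 404 -> NotFound, 403 -> PermissionDenied,
+            # so callers can catch specific error classes.)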
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BasePublisherRestTransport._BaseSetIamPolicy, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
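+
+            Example (an illustrative sketch, not generated code; resource and
+            member names are hypothetical):
+
+            .. code-block:: python
+
+                from google import pubsub_v1
+                from google.iam.v1 import iam_policy_pb2, policy_pb2
+
+                client = pubsub_v1.PublisherClient(transport="rest")
+                request = iam_policy_pb2.SetIamPolicyRequest(
+                    resource="projects/my-project/topics/my-topic",
+                    policy=policy_pb2.Policy(
+                        bindings=[
+                            policy_pb2.Binding(
+                                role="roles/pubsub.publisher",
+                                members=["user:publisher@example.com"],
+                            )
+                        ]
+                    ),
+                )
+                # Replaces the topic's existing IAM policy with the given one.
+                response = client.set_iam_policy(request=request)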
+ """ + + http_options = ( + _BasePublisherRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = ( + _BasePublisherRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + body = _BasePublisherRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BasePublisherRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BasePublisherRestTransport._BaseTestIamPermissions, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BasePublisherRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BasePublisherRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BasePublisherRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BasePublisherRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = PublisherRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PublisherRestTransport",) diff --git a/google/pubsub_v1/services/publisher/transports/rest_base.py b/google/pubsub_v1/services/publisher/transports/rest_base.py new file mode 100644 index 000000000..14308a300 --- /dev/null +++ b/google/pubsub_v1/services/publisher/transports/rest_base.py @@ -0,0 +1,678 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .base import PublisherTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + + +class _BasePublisherRestTransport(PublisherTransport): + """Base REST backend transport for Publisher. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+    class _BaseCreateTopic:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "put",
+                    "uri": "/v1/{name=projects/*/topics/*}",
+                    "body": "*",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = pubsub.Topic.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            return body
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(
+                _BasePublisherRestTransport._BaseCreateTopic._get_unset_required_fields(
+                    query_params
+                )
+            )
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseDeleteTopic:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "delete",
+                    "uri": "/v1/{topic=projects/*/topics/*}",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = pubsub.DeleteTopicRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    
use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseDeleteTopic._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDetachSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:detach", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.DetachSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseDetachSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetTopic: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.GetTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseGetTopic._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTopics: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/topics", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListTopicsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], 
+ use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseListTopics._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTopicSnapshots: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}/snapshots", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListTopicSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseListTopicSnapshots._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTopicSubscriptions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}/subscriptions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListTopicSubscriptionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseListTopicSubscriptions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePublish: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{topic=projects/*/topics/*}:publish", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.PublishRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + 
body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BasePublish._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateTopic: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{topic.name=projects/*/topics/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.UpdateTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseUpdateTopic._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": 
"*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BasePublisherRestTransport",) diff --git a/google/__init__.py b/google/pubsub_v1/services/schema_service/__init__.py similarity index 67% rename from google/__init__.py rename to google/pubsub_v1/services/schema_service/__init__.py index 9a1b64a6d..0908014e8 100644 --- a/google/__init__.py +++ b/google/pubsub_v1/services/schema_service/__init__.py @@ -1,24 +1,22 @@ # -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+# +from .client import SchemaServiceClient +from .async_client import SchemaServiceAsyncClient -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "SchemaServiceClient", + "SchemaServiceAsyncClient", +) diff --git a/google/pubsub_v1/services/schema_service/async_client.py b/google/pubsub_v1/services/schema_service/async_client.py new file mode 100644 index 000000000..b2d139fa0 --- /dev/null +++ b/google/pubsub_v1/services/schema_service/async_client.py @@ -0,0 +1,1783 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging as std_logging +from collections import OrderedDict +import re +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.pubsub_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .client import SchemaServiceClient + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class SchemaServiceAsyncClient: + """Service for doing schema-related operations.""" + + _client: SchemaServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
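+    # (The template variant expands the client's configured universe domain,
+    # yielding "pubsub.googleapis.com" for the default "googleapis.com"
+    # universe; the deprecated constant hard-codes that default.)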
+    DEFAULT_ENDPOINT = SchemaServiceClient.DEFAULT_ENDPOINT
+    DEFAULT_MTLS_ENDPOINT = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT
+    _DEFAULT_ENDPOINT_TEMPLATE = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE
+    _DEFAULT_UNIVERSE = SchemaServiceClient._DEFAULT_UNIVERSE
+
+    schema_path = staticmethod(SchemaServiceClient.schema_path)
+    parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path)
+    common_billing_account_path = staticmethod(
+        SchemaServiceClient.common_billing_account_path
+    )
+    parse_common_billing_account_path = staticmethod(
+        SchemaServiceClient.parse_common_billing_account_path
+    )
+    common_folder_path = staticmethod(SchemaServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(
+        SchemaServiceClient.parse_common_folder_path
+    )
+    common_organization_path = staticmethod(
+        SchemaServiceClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        SchemaServiceClient.parse_common_organization_path
+    )
+    common_project_path = staticmethod(SchemaServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        SchemaServiceClient.parse_common_project_path
+    )
+    common_location_path = staticmethod(SchemaServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        SchemaServiceClient.parse_common_location_path
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SchemaServiceAsyncClient: The constructed client.
+        """
+        return SchemaServiceClient.from_service_account_info.__func__(SchemaServiceAsyncClient, info, *args, **kwargs)  # type: ignore
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SchemaServiceAsyncClient: The constructed client.
+        """
+        return SchemaServiceClient.from_service_account_file.__func__(SchemaServiceAsyncClient, filename, *args, **kwargs)  # type: ignore
+
+    from_service_account_json = from_service_account_file
+
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
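+
+        Example (an illustrative sketch, not generated code):
+
+        .. code-block:: python
+
+            from google.api_core.client_options import ClientOptions
+            from google import pubsub_v1
+
+            options = ClientOptions(api_endpoint="pubsub.googleapis.com")
+            endpoint, cert_source = (
+                pubsub_v1.SchemaServiceAsyncClient.get_mtls_endpoint_and_cert_source(
+                    options
+                )
+            )
+            # With an explicit api_endpoint, rule (1) above selects it directly;
+            # cert_source is None unless GOOGLE_API_USE_CLIENT_CERTIFICATE is
+            # "true" and a client certificate source is available.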
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return SchemaServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
+    @property
+    def transport(self) -> SchemaServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SchemaServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
+    @property
+    def api_endpoint(self):
+        """Return the API endpoint used by the client instance.
+
+        Returns:
+            str: The API endpoint used by the client instance.
+        """
+        return self._client._api_endpoint
+
+    @property
+    def universe_domain(self) -> str:
+        """Return the universe domain used by the client instance.
+
+        Returns:
+            str: The universe domain used
+            by the client instance.
+        """
+        return self._client._universe_domain
+
+    get_transport_class = SchemaServiceClient.get_transport_class
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Optional[
+            Union[str, SchemaServiceTransport, Callable[..., SchemaServiceTransport]]
+        ] = "grpc_asyncio",
+        client_options: Optional[ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the schema service async client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,SchemaServiceTransport,Callable[..., SchemaServiceTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport to use.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SchemaServiceTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which has one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. 
The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SchemaServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SchemaServiceAsyncClient`.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.SchemaService", + "credentialsType": None, + }, + ) + + async def create_schema( + self, + request: Optional[Union[gp_schema.CreateSchemaRequest, dict]] = None, + *, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + schema_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.Schema: + r"""Creates a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.create_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.CreateSchemaRequest, dict]]): + The request object. Request for the CreateSchema method. + parent (:class:`str`): + Required. The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`google.pubsub_v1.types.Schema`): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. The schema + object returned by CreateSchema will have a ``name`` + made using the given ``parent`` and ``schema_id``. 
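+
+                (Illustrative example: with ``parent="projects/my-project"`` and
+                ``schema_id="my-schema"``, the created schema's ``name`` will be
+                ``projects/my-project/schemas/my-schema``.)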
+ + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (:class:`str`): + The ID to use for the schema, which will become the + final component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names + for resource name constraints. + + This corresponds to the ``schema_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, schema, schema_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.CreateSchemaRequest): + request = gp_schema.CreateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + if schema_id is not None: + request.schema_id = schema_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_schema + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_schema( + self, + request: Optional[Union[schema.GetSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Gets a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.GetSchemaRequest, dict]]): + The request object. Request for the GetSchema method. + name (:class:`str`): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.GetSchemaRequest): + request = schema.GetSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_schema + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_schemas( + self, + request: Optional[Union[schema.ListSchemasRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSchemasAsyncPager: + r"""Lists schemas in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ListSchemasRequest, dict]]): + The request object. Request for the ``ListSchemas`` method. + parent (:class:`str`): + Required. The name of the project in which to list + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager: + Response for the ListSchemas method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ListSchemasRequest): + request = schema.ListSchemasRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_schemas + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSchemasAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
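+        # (The pager keeps a reference to the wrapped RPC, so iterating past the
+        # first page transparently re-issues ListSchemas with each response's
+        # next_page_token.)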
+        return response
+
+    async def list_schema_revisions(
+        self,
+        request: Optional[Union[schema.ListSchemaRevisionsRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pagers.ListSchemaRevisionsAsyncPager:
+        r"""Lists all schema revisions for the named schema.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            async def sample_list_schema_revisions():
+                # Create a client
+                client = pubsub_v1.SchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.ListSchemaRevisionsRequest(
+                    name="name_value",
+                )
+
+                # Make the request
+                page_result = await client.list_schema_revisions(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.pubsub_v1.types.ListSchemaRevisionsRequest, dict]]):
+                The request object. Request for the ``ListSchemaRevisions`` method.
+            name (:class:`str`):
+                Required. The name of the schema to
+                list revisions for.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsAsyncPager:
+                Response for the ListSchemaRevisions method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, schema.ListSchemaRevisionsRequest):
+            request = schema.ListSchemaRevisionsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.list_schema_revisions
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
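+        # (gapic_v1.routing_header encodes these pairs as the
+        # `x-goog-request-params` request header, which lets the backend
+        # route the call by resource name.)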
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSchemaRevisionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def commit_schema( + self, + request: Optional[Union[gp_schema.CommitSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.Schema: + r"""Commits a new schema revision to an existing schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = await client.commit_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.CommitSchemaRequest, dict]]): + The request object. Request for CommitSchema method. + name (:class:`str`): + Required. The name of the schema we are revising. Format + is ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`google.pubsub_v1.types.Schema`): + Required. The schema revision to + commit. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
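+        # (Per the GAPIC flattening convention, `name` and `schema` are
+        # convenience aliases for fields on the request object and cannot be
+        # combined with an explicit `request` argument.)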
+ flattened_params = [name, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.CommitSchemaRequest): + request = gp_schema.CommitSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.commit_schema + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def rollback_schema( + self, + request: Optional[Union[schema.RollbackSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + revision_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Creates a new schema revision that is a copy of the provided + revision_id. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = await client.rollback_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.RollbackSchemaRequest, dict]]): + The request object. Request for the ``RollbackSchema`` method. + name (:class:`str`): + Required. The schema being rolled + back with revision id. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + revision_id (:class:`str`): + Required. The revision ID to roll + back to. It must be a revision of the + same schema. + + Example: c7cfa2a8 + + This corresponds to the ``revision_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.RollbackSchemaRequest): + request = schema.RollbackSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.rollback_schema + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_schema_revision( + self, + request: Optional[Union[schema.DeleteSchemaRevisionRequest, dict]] = None, + *, + name: Optional[str] = None, + revision_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Deletes a specific schema revision. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_delete_schema_revision(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRevisionRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_schema_revision(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.DeleteSchemaRevisionRequest, dict]]): + The request object. Request for the ``DeleteSchemaRevision`` method. + name (:class:`str`): + Required. The name of the schema revision to be deleted, + with a revision ID explicitly included. 
+ + Example: ``projects/123/schemas/my-schema@c7cfa2a8`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + revision_id (:class:`str`): + Optional. This field is deprecated and should not be + used for specifying the revision ID. The revision ID + should be specified via the ``name`` parameter. + + This corresponds to the ``revision_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.DeleteSchemaRevisionRequest): + request = schema.DeleteSchemaRevisionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_schema_revision + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_schema( + self, + request: Optional[Union[schema.DeleteSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + await client.delete_schema(request=request) + + Args: + request (Optional[Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]]): + The request object. Request for the ``DeleteSchema`` method. + name (:class:`str`): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.DeleteSchemaRequest): + request = schema.DeleteSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_schema + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def validate_schema( + self, + request: Optional[Union[gp_schema.ValidateSchemaRequest, dict]] = None, + *, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Validates a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.validate_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]]): + The request object. Request for the ``ValidateSchema`` method. + parent (:class:`str`): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`google.pubsub_v1.types.Schema`): + Required. The schema object to + validate. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.ValidateSchemaResponse: + Response for the ValidateSchema method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.ValidateSchemaRequest): + request = gp_schema.ValidateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.validate_schema + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
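+        # (ValidateSchemaResponse is currently an empty message: a normal
+        # return means the definition is valid, while an invalid schema is
+        # surfaced as an error, typically InvalidArgument.)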
+ return response + + async def validate_message( + self, + request: Optional[Union[schema.ValidateMessageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.ValidateMessageResponse: + r"""Validates a message against a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = await client.validate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ValidateMessageRequest, dict]]): + The request object. Request for the ``ValidateMessage`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.ValidateMessageResponse: + Response for the ValidateMessage method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ValidateMessageRequest): + request = schema.ValidateMessageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.validate_message + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
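+        # (The returned Policy carries an `etag`; passing it back on a later
+        # SetIamPolicy call makes the read-modify-write cycle safe against
+        # concurrent edits.)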
+ return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "SchemaServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("SchemaServiceAsyncClient",) diff --git a/google/pubsub_v1/services/schema_service/client.py b/google/pubsub_v1/services/schema_service/client.py new file mode 100644 index 000000000..300f23998 --- /dev/null +++ b/google/pubsub_v1/services/schema_service/client.py @@ -0,0 +1,2215 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import functools +import os +import re +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.pubsub_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + +import grpc +from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SchemaServiceGrpcTransport +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .transports.rest import SchemaServiceRestTransport + + +class SchemaServiceClientMeta(type): + """Metaclass for the SchemaService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] + _transport_registry["grpc"] = SchemaServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SchemaServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[SchemaServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SchemaServiceClient(metaclass=SchemaServiceClientMeta): + """Service for doing schema-related operations.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. 
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+    DEFAULT_ENDPOINT = "pubsub.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @staticmethod
+    def _use_client_cert_effective():
+        """Returns whether a client certificate should be used for mTLS.
+
+        If the installed google-auth version supports `should_use_client_cert`,
+        defer to it for automatic mTLS enablement; otherwise, read the
+        GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable.
+
+        Returns:
+            bool: whether client certificate should be used for mTLS
+        Raises:
+            ValueError: If google-auth lacks `should_use_client_cert` and
+                GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.
+        """
+        # check if google-auth version supports should_use_client_cert for automatic mTLS enablement
+        if hasattr(mtls, "should_use_client_cert"):  # pragma: NO COVER
+            return mtls.should_use_client_cert()
+        else:  # pragma: NO COVER
+            # if unsupported, fall back to reading from the env var
+            use_client_cert_str = os.getenv(
+                "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+            ).lower()
+            if use_client_cert_str not in ("true", "false"):
+                raise ValueError(
+                    "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
+                    " either `true` or `false`"
+                )
+            return use_client_cert_str == "true"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SchemaServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_info(info)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+            file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SchemaServiceClient: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> SchemaServiceTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            SchemaServiceTransport: The transport used by the client
+            instance.
+ """ + return self._transport + + @staticmethod + def schema_path( + project: str, + schema: str, + ) -> str: + """Returns a fully-qualified schema string.""" + return "projects/{project}/schemas/{schema}".format( + project=project, + schema=schema, + ) + + @staticmethod + def parse_schema_path(path: str) -> Dict[str, str]: + """Parses a schema path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
+        otherwise use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+
+        warnings.warn(
+            "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.",
+            DeprecationWarning,
+        )
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = SchemaServiceClient._use_client_cert_effective()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert:
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
+    @staticmethod
+    def _read_environment_variables():
+        """Returns the environment variables used by the client.
+
+        Returns:
+            Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE,
+            GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables.
+
+        Raises:
+            ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not
+                any of ["true", "false"].
+            google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT
+                is not any of ["auto", "never", "always"].
+        """
+        use_client_cert = SchemaServiceClient._use_client_cert_effective()
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower()
+        universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN")
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+        return use_client_cert, use_mtls_endpoint, universe_domain_env
+
+    @staticmethod
+    def _get_client_cert_source(provided_cert_source, use_cert_flag):
+        """Return the client cert source to be used by the client.
+
+        Args:
+            provided_cert_source (bytes): The client certificate source provided.
+            use_cert_flag (bool): A flag indicating whether to use the client certificate.
+
+        Returns:
+            bytes or None: The client cert source to be used by the client.
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SchemaServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SchemaServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, SchemaServiceTransport, Callable[..., SchemaServiceTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the schema service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,SchemaServiceTransport,Callable[..., SchemaServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SchemaServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SchemaServiceClient._read_environment_variables() + self._client_cert_source = SchemaServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = SchemaServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SchemaServiceTransport) + if transport_provided: + # transport is a SchemaServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = cast(SchemaServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SchemaServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[SchemaServiceTransport], Callable[..., SchemaServiceTransport] + ] = ( + SchemaServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SchemaServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + transport_init = functools.partial(transport_init, channel=channel) + + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SchemaServiceClient`.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.SchemaService", + "credentialsType": None, + }, + ) + + def create_schema( + self, + request: Optional[Union[gp_schema.CreateSchemaRequest, dict]] = None, + *, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + schema_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.Schema: + r"""Creates a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.create_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): + The request object. Request for the CreateSchema method. + parent (str): + Required. The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.pubsub_v1.types.Schema): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. The schema + object returned by CreateSchema will have a ``name`` + made using the given ``parent`` and ``schema_id``. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (str): + The ID to use for the schema, which will become the + final component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names + for resource name constraints. + + This corresponds to the ``schema_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, schema, schema_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.CreateSchemaRequest): + request = gp_schema.CreateSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + if schema_id is not None: + request.schema_id = schema_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_schema] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_schema( + self, + request: Optional[Union[schema.GetSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Gets a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): + The request object. Request for the GetSchema method. + name (str): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.GetSchemaRequest): + request = schema.GetSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_schema] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_schemas( + self, + request: Optional[Union[schema.ListSchemasRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSchemasPager: + r"""Lists schemas in a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): + The request object. Request for the ``ListSchemas`` method. + parent (str): + Required. The name of the project in which to list + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.schema_service.pagers.ListSchemasPager: + Response for the ListSchemas method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ListSchemasRequest): + request = schema.ListSchemasRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_schemas] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchemasPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_schema_revisions( + self, + request: Optional[Union[schema.ListSchemaRevisionsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSchemaRevisionsPager: + r"""Lists all schema revisions for the named schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_list_schema_revisions(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemaRevisionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_schema_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSchemaRevisionsRequest, dict]): + The request object. Request for the ``ListSchemaRevisions`` method. + name (str): + Required. The name of the schema to + list revisions for. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsPager: + Response for the ListSchemaRevisions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ListSchemaRevisionsRequest): + request = schema.ListSchemaRevisionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_schema_revisions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchemaRevisionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def commit_schema( + self, + request: Optional[Union[gp_schema.CommitSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.Schema: + r"""Commits a new schema revision to an existing schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = client.commit_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.CommitSchemaRequest, dict]): + The request object. Request for CommitSchema method. + name (str): + Required. The name of the schema we are revising. Format + is ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.pubsub_v1.types.Schema): + Required. The schema revision to + commit. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.CommitSchemaRequest): + request = gp_schema.CommitSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def rollback_schema( + self, + request: Optional[Union[schema.RollbackSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + revision_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Creates a new schema revision that is a copy of the provided + revision_id. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = client.rollback_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.RollbackSchemaRequest, dict]): + The request object. Request for the ``RollbackSchema`` method. + name (str): + Required. The schema being rolled + back with revision id. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + revision_id (str): + Required. The revision ID to roll + back to. It must be a revision of the + same schema. 
+ + Example: c7cfa2a8 + + This corresponds to the ``revision_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.RollbackSchemaRequest): + request = schema.RollbackSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_schema_revision( + self, + request: Optional[Union[schema.DeleteSchemaRevisionRequest, dict]] = None, + *, + name: Optional[str] = None, + revision_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Deletes a specific schema revision. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_delete_schema_revision(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRevisionRequest( + name="name_value", + ) + + # Make the request + response = client.delete_schema_revision(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.DeleteSchemaRevisionRequest, dict]): + The request object. 
Request for the ``DeleteSchemaRevision`` method. + name (str): + Required. The name of the schema revision to be deleted, + with a revision ID explicitly included. + + Example: ``projects/123/schemas/my-schema@c7cfa2a8`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + revision_id (str): + Optional. This field is deprecated and should not be + used for specifying the revision ID. The revision ID + should be specified via the ``name`` parameter. + + This corresponds to the ``revision_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.DeleteSchemaRevisionRequest): + request = schema.DeleteSchemaRevisionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_schema_revision] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_schema( + self, + request: Optional[Union[schema.DeleteSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + client.delete_schema(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): + The request object. Request for the ``DeleteSchema`` method. + name (str): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.DeleteSchemaRequest): + request = schema.DeleteSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def validate_schema( + self, + request: Optional[Union[gp_schema.ValidateSchemaRequest, dict]] = None, + *, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Validates a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.validate_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): + The request object. Request for the ``ValidateSchema`` method. + parent (str): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.pubsub_v1.types.Schema): + Required. The schema object to + validate. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.ValidateSchemaResponse: + Response for the ValidateSchema method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.ValidateSchemaRequest): + request = gp_schema.ValidateSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def validate_message( + self, + request: Optional[Union[schema.ValidateMessageRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.ValidateMessageResponse: + r"""Validates a message against a schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = client.validate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): + The request object. Request for the ``ValidateMessage`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.ValidateMessageResponse: + Response for the ValidateMessage method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ValidateMessageRequest): + request = schema.ValidateMessageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_message] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SchemaServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. 
+ + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("SchemaServiceClient",) diff --git a/google/pubsub_v1/services/schema_service/pagers.py b/google/pubsub_v1/services/schema_service/pagers.py new file mode 100644 index 000000000..02beaee40 --- /dev/null +++ b/google/pubsub_v1/services/schema_service/pagers.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Sequence, + Tuple, + Optional, + Iterator, + Union, +) + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.pubsub_v1.types import schema + + +class ListSchemasPager: + """A pager for iterating through ``list_schemas`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSchemasResponse` object, and + provides an ``__iter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSchemas`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSchemasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., schema.ListSchemasResponse], + request: schema.ListSchemasRequest, + response: schema.ListSchemasResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSchemasRequest): + The initial request object. + response (google.pubsub_v1.types.ListSchemasResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = schema.ListSchemasRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[schema.ListSchemasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[schema.Schema]: + for page in self.pages: + yield from page.schemas + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSchemasAsyncPager: + """A pager for iterating through ``list_schemas`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSchemasResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSchemas`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSchemasResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[schema.ListSchemasResponse]], + request: schema.ListSchemasRequest, + response: schema.ListSchemasResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSchemasRequest): + The initial request object. + response (google.pubsub_v1.types.ListSchemasResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = schema.ListSchemasRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[schema.ListSchemasResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[schema.Schema]: + async def async_generator(): + async for page in self.pages: + for response in page.schemas: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSchemaRevisionsPager: + """A pager for iterating through ``list_schema_revisions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSchemaRevisions`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., schema.ListSchemaRevisionsResponse], + request: schema.ListSchemaRevisionsRequest, + response: schema.ListSchemaRevisionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSchemaRevisionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSchemaRevisionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = schema.ListSchemaRevisionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[schema.ListSchemaRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[schema.Schema]: + for page in self.pages: + yield from page.schemas + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSchemaRevisionsAsyncPager: + """A pager for iterating through ``list_schema_revisions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSchemaRevisions`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[schema.ListSchemaRevisionsResponse]], + request: schema.ListSchemaRevisionsRequest, + response: schema.ListSchemaRevisionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSchemaRevisionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSchemaRevisionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = schema.ListSchemaRevisionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[schema.ListSchemaRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[schema.Schema]: + async def async_generator(): + async for page in self.pages: + for response in page.schemas: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/pubsub_v1/services/schema_service/transports/README.rst b/google/pubsub_v1/services/schema_service/transports/README.rst new file mode 100644 index 000000000..a0a06949e --- /dev/null +++ b/google/pubsub_v1/services/schema_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SchemaServiceTransport` is the ABC for all transports. +- public child `SchemaServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SchemaServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSchemaServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `SchemaServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/pubsub_v1/services/schema_service/transports/__init__.py b/google/pubsub_v1/services/schema_service/transports/__init__.py new file mode 100644 index 000000000..78c2fa21d --- /dev/null +++ b/google/pubsub_v1/services/schema_service/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SchemaServiceTransport +from .grpc import SchemaServiceGrpcTransport +from .grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .rest import SchemaServiceRestTransport +from .rest import SchemaServiceRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] +_transport_registry["grpc"] = SchemaServiceGrpcTransport +_transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SchemaServiceRestTransport + +__all__ = ( + "SchemaServiceTransport", + "SchemaServiceGrpcTransport", + "SchemaServiceGrpcAsyncIOTransport", + "SchemaServiceRestTransport", + "SchemaServiceRestInterceptor", +) diff --git a/google/pubsub_v1/services/schema_service/transports/base.py b/google/pubsub_v1/services/schema_service/transports/base.py new file mode 100644 index 000000000..bfe254e0a --- /dev/null +++ b/google/pubsub_v1/services/schema_service/transports/base.py @@ -0,0 +1,440 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.pubsub_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SchemaServiceTransport(abc.ABC): + """Abstract transport class for SchemaService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + DEFAULT_HOST: str = "pubsub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
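+        # Every schema RPC below shares the same default policy: exponential
+        # backoff starting at 0.1s and growing 1.3x per attempt up to 60s,
+        # retried only on ServiceUnavailable, under a 60s overall deadline.
+        # The IAM mixin methods are wrapped without a default retry/timeout.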
+ self._wrapped_methods = { + self.create_schema: gapic_v1.method.wrap_method( + self.create_schema, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_schema: gapic_v1.method.wrap_method( + self.get_schema, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_schemas: gapic_v1.method.wrap_method( + self.list_schemas, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_schema_revisions: gapic_v1.method.wrap_method( + self.list_schema_revisions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.commit_schema: gapic_v1.method.wrap_method( + self.commit_schema, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.rollback_schema: gapic_v1.method.wrap_method( + self.rollback_schema, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_schema_revision: gapic_v1.method.wrap_method( + self.delete_schema_revision, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_schema: gapic_v1.method.wrap_method( + self.delete_schema, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.validate_schema: gapic_v1.method.wrap_method( + self.validate_schema, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.validate_message: gapic_v1.method.wrap_method( + self.validate_message, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + 
default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_schema( + self, + ) -> Callable[ + [gp_schema.CreateSchemaRequest], + Union[gp_schema.Schema, Awaitable[gp_schema.Schema]], + ]: + raise NotImplementedError() + + @property + def get_schema( + self, + ) -> Callable[ + [schema.GetSchemaRequest], Union[schema.Schema, Awaitable[schema.Schema]] + ]: + raise NotImplementedError() + + @property + def list_schemas( + self, + ) -> Callable[ + [schema.ListSchemasRequest], + Union[schema.ListSchemasResponse, Awaitable[schema.ListSchemasResponse]], + ]: + raise NotImplementedError() + + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], + Union[ + schema.ListSchemaRevisionsResponse, + Awaitable[schema.ListSchemaRevisionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def commit_schema( + self, + ) -> Callable[ + [gp_schema.CommitSchemaRequest], + Union[gp_schema.Schema, Awaitable[gp_schema.Schema]], + ]: + raise NotImplementedError() + + @property + def rollback_schema( + self, + ) -> Callable[ + [schema.RollbackSchemaRequest], Union[schema.Schema, Awaitable[schema.Schema]] + ]: + raise NotImplementedError() + + @property + def delete_schema_revision( + self, + ) -> Callable[ + [schema.DeleteSchemaRevisionRequest], + Union[schema.Schema, Awaitable[schema.Schema]], + ]: + raise NotImplementedError() + + @property + def delete_schema( + self, + ) -> Callable[ + [schema.DeleteSchemaRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def validate_schema( + self, + ) -> Callable[ + [gp_schema.ValidateSchemaRequest], + Union[ + gp_schema.ValidateSchemaResponse, + Awaitable[gp_schema.ValidateSchemaResponse], + ], + ]: + raise NotImplementedError() + + @property + def validate_message( + self, + ) -> Callable[ + [schema.ValidateMessageRequest], + Union[ + schema.ValidateMessageResponse, Awaitable[schema.ValidateMessageResponse] + ], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SchemaServiceTransport",) diff --git a/google/pubsub_v1/services/schema_service/transports/grpc.py b/google/pubsub_v1/services/schema_service/transports/grpc.py new file mode 100644 index 000000000..5bcfd8b9b --- /dev/null +++ b/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -0,0 +1,678 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": client_call_details.method, 
+ "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SchemaServiceGrpcTransport(SchemaServiceTransport): + """gRPC backend transport for SchemaService. + + Service for doing schema-related operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_schema( + self, + ) -> Callable[[gp_schema.CreateSchemaRequest], gp_schema.Schema]: + r"""Return a callable for the create schema method over gRPC. + + Creates a schema. + + Returns: + Callable[[~.CreateSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CreateSchema", + request_serializer=gp_schema.CreateSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["create_schema"] + + @property + def get_schema(self) -> Callable[[schema.GetSchemaRequest], schema.Schema]: + r"""Return a callable for the get schema method over gRPC. + + Gets a schema. + + Returns: + Callable[[~.GetSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
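+        # The stub is created on first access and cached in ``self._stubs``,
+        # so later property lookups reuse the same callable.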
+ if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/GetSchema", + request_serializer=schema.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[[schema.ListSchemasRequest], schema.ListSchemasResponse]: + r"""Return a callable for the list schemas method over gRPC. + + Lists schemas in a project. + + Returns: + Callable[[~.ListSchemasRequest], + ~.ListSchemasResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemas", + request_serializer=schema.ListSchemasRequest.serialize, + response_deserializer=schema.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], schema.ListSchemaRevisionsResponse + ]: + r"""Return a callable for the list schema revisions method over gRPC. + + Lists all schema revisions for the named schema. + + Returns: + Callable[[~.ListSchemaRevisionsRequest], + ~.ListSchemaRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schema_revisions" not in self._stubs: + self._stubs["list_schema_revisions"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemaRevisions", + request_serializer=schema.ListSchemaRevisionsRequest.serialize, + response_deserializer=schema.ListSchemaRevisionsResponse.deserialize, + ) + return self._stubs["list_schema_revisions"] + + @property + def commit_schema( + self, + ) -> Callable[[gp_schema.CommitSchemaRequest], gp_schema.Schema]: + r"""Return a callable for the commit schema method over gRPC. + + Commits a new schema revision to an existing schema. + + Returns: + Callable[[~.CommitSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "commit_schema" not in self._stubs: + self._stubs["commit_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CommitSchema", + request_serializer=gp_schema.CommitSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["commit_schema"] + + @property + def rollback_schema( + self, + ) -> Callable[[schema.RollbackSchemaRequest], schema.Schema]: + r"""Return a callable for the rollback schema method over gRPC. + + Creates a new schema revision that is a copy of the provided + revision_id. + + Returns: + Callable[[~.RollbackSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_schema" not in self._stubs: + self._stubs["rollback_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/RollbackSchema", + request_serializer=schema.RollbackSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["rollback_schema"] + + @property + def delete_schema_revision( + self, + ) -> Callable[[schema.DeleteSchemaRevisionRequest], schema.Schema]: + r"""Return a callable for the delete schema revision method over gRPC. + + Deletes a specific schema revision. + + Returns: + Callable[[~.DeleteSchemaRevisionRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema_revision" not in self._stubs: + self._stubs["delete_schema_revision"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchemaRevision", + request_serializer=schema.DeleteSchemaRevisionRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["delete_schema_revision"] + + @property + def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty_pb2.Empty]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a schema. + + Returns: + Callable[[~.DeleteSchemaRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchema", + request_serializer=schema.DeleteSchemaRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_schema"] + + @property + def validate_schema( + self, + ) -> Callable[[gp_schema.ValidateSchemaRequest], gp_schema.ValidateSchemaResponse]: + r"""Return a callable for the validate schema method over gRPC. + + Validates a schema. + + Returns: + Callable[[~.ValidateSchemaRequest], + ~.ValidateSchemaResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_schema" not in self._stubs: + self._stubs["validate_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateSchema", + request_serializer=gp_schema.ValidateSchemaRequest.serialize, + response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, + ) + return self._stubs["validate_schema"] + + @property + def validate_message( + self, + ) -> Callable[[schema.ValidateMessageRequest], schema.ValidateMessageResponse]: + r"""Return a callable for the validate message method over gRPC. + + Validates a message against a schema. + + Returns: + Callable[[~.ValidateMessageRequest], + ~.ValidateMessageResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_message" not in self._stubs: + self._stubs["validate_message"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateMessage", + request_serializer=schema.ValidateMessageRequest.serialize, + response_deserializer=schema.ValidateMessageResponse.deserialize, + ) + return self._stubs["validate_message"] + + def close(self): + self._logged_channel.close() + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SchemaServiceGrpcTransport",) diff --git a/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py new file mode 100644 index 000000000..ac2980ded --- /dev/null +++ b/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -0,0 +1,860 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import pickle +import logging as std_logging +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import SchemaServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, 
+ "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): + """gRPC AsyncIO backend transport for SchemaService. + + Service for doing schema-related operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. 
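+        # The AsyncIO logging interceptor was appended to this channel's
+        # interceptor list at construction time, so the cached channel is
+        # already the logged channel.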
+ return self._grpc_channel + + @property + def create_schema( + self, + ) -> Callable[[gp_schema.CreateSchemaRequest], Awaitable[gp_schema.Schema]]: + r"""Return a callable for the create schema method over gRPC. + + Creates a schema. + + Returns: + Callable[[~.CreateSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CreateSchema", + request_serializer=gp_schema.CreateSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["create_schema"] + + @property + def get_schema( + self, + ) -> Callable[[schema.GetSchemaRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the get schema method over gRPC. + + Gets a schema. + + Returns: + Callable[[~.GetSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/GetSchema", + request_serializer=schema.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[[schema.ListSchemasRequest], Awaitable[schema.ListSchemasResponse]]: + r"""Return a callable for the list schemas method over gRPC. + + Lists schemas in a project. + + Returns: + Callable[[~.ListSchemasRequest], + Awaitable[~.ListSchemasResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemas", + request_serializer=schema.ListSchemasRequest.serialize, + response_deserializer=schema.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], + Awaitable[schema.ListSchemaRevisionsResponse], + ]: + r"""Return a callable for the list schema revisions method over gRPC. + + Lists all schema revisions for the named schema. + + Returns: + Callable[[~.ListSchemaRevisionsRequest], + Awaitable[~.ListSchemaRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_schema_revisions" not in self._stubs: + self._stubs["list_schema_revisions"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemaRevisions", + request_serializer=schema.ListSchemaRevisionsRequest.serialize, + response_deserializer=schema.ListSchemaRevisionsResponse.deserialize, + ) + return self._stubs["list_schema_revisions"] + + @property + def commit_schema( + self, + ) -> Callable[[gp_schema.CommitSchemaRequest], Awaitable[gp_schema.Schema]]: + r"""Return a callable for the commit schema method over gRPC. + + Commits a new schema revision to an existing schema. + + Returns: + Callable[[~.CommitSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "commit_schema" not in self._stubs: + self._stubs["commit_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CommitSchema", + request_serializer=gp_schema.CommitSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["commit_schema"] + + @property + def rollback_schema( + self, + ) -> Callable[[schema.RollbackSchemaRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the rollback schema method over gRPC. + + Creates a new schema revision that is a copy of the provided + revision_id. + + Returns: + Callable[[~.RollbackSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_schema" not in self._stubs: + self._stubs["rollback_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/RollbackSchema", + request_serializer=schema.RollbackSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["rollback_schema"] + + @property + def delete_schema_revision( + self, + ) -> Callable[[schema.DeleteSchemaRevisionRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the delete schema revision method over gRPC. + + Deletes a specific schema revision. + + Returns: + Callable[[~.DeleteSchemaRevisionRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema_revision" not in self._stubs: + self._stubs["delete_schema_revision"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchemaRevision", + request_serializer=schema.DeleteSchemaRevisionRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["delete_schema_revision"] + + @property + def delete_schema( + self, + ) -> Callable[[schema.DeleteSchemaRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a schema. + + Returns: + Callable[[~.DeleteSchemaRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchema", + request_serializer=schema.DeleteSchemaRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_schema"] + + @property + def validate_schema( + self, + ) -> Callable[ + [gp_schema.ValidateSchemaRequest], Awaitable[gp_schema.ValidateSchemaResponse] + ]: + r"""Return a callable for the validate schema method over gRPC. + + Validates a schema. + + Returns: + Callable[[~.ValidateSchemaRequest], + Awaitable[~.ValidateSchemaResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_schema" not in self._stubs: + self._stubs["validate_schema"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateSchema", + request_serializer=gp_schema.ValidateSchemaRequest.serialize, + response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, + ) + return self._stubs["validate_schema"] + + @property + def validate_message( + self, + ) -> Callable[ + [schema.ValidateMessageRequest], Awaitable[schema.ValidateMessageResponse] + ]: + r"""Return a callable for the validate message method over gRPC. + + Validates a message against a schema. + + Returns: + Callable[[~.ValidateMessageRequest], + Awaitable[~.ValidateMessageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "validate_message" not in self._stubs: + self._stubs["validate_message"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateMessage", + request_serializer=schema.ValidateMessageRequest.serialize, + response_deserializer=schema.ValidateMessageResponse.deserialize, + ) + return self._stubs["validate_message"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_schema: self._wrap_method( + self.create_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_schema: self._wrap_method( + self.get_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_schemas: self._wrap_method( + self.list_schemas, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_schema_revisions: self._wrap_method( + self.list_schema_revisions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.commit_schema: self._wrap_method( + self.commit_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.rollback_schema: self._wrap_method( + self.rollback_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_schema_revision: self._wrap_method( + self.delete_schema_revision, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_schema: self._wrap_method( + self.delete_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.validate_schema: self._wrap_method( + self.validate_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.validate_message: self._wrap_method( + self.validate_message, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, 
+ ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
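The wrappers above give every schema RPC the same defaults: exponential backoff starting at 0.1s, capped at 60s with multiplier 1.3, retrying only ``ServiceUnavailable``, under a 60s overall deadline. Callers can override these per call; a hedged sketch, where the client construction and all values are illustrative:

```python
from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.pubsub_v1 import GetSchemaRequest, SchemaServiceAsyncClient


async def fetch_schema(name: str):
    client = SchemaServiceAsyncClient()  # assumes default credentials
    # A wider retry window than the generated default above.
    custom_retry = retries.AsyncRetry(
        initial=0.2,
        maximum=30.0,
        multiplier=2.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=120.0,
    )
    return await client.get_schema(
        GetSchemaRequest(name=name), retry=custom_retry, timeout=120.0
    )
```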
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SchemaServiceGrpcAsyncIOTransport",) diff --git a/google/pubsub_v1/services/schema_service/transports/rest.py b/google/pubsub_v1/services/schema_service/transports/rest.py new file mode 100644 index 000000000..a0d42c2dd --- /dev/null +++ b/google/pubsub_v1/services/schema_service/transports/rest.py @@ -0,0 +1,2719 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + + +from .rest_base import _BaseSchemaServiceRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SchemaServiceRestInterceptor: + """Interceptor for SchemaService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+    Example use cases include:
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the SchemaServiceRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomSchemaServiceInterceptor(SchemaServiceRestInterceptor):
+            def pre_commit_schema(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_commit_schema(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_schema(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_schema(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_schema(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_delete_schema_revision(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_delete_schema_revision(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_schema(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_schema(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_schema_revisions(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_schema_revisions(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_schemas(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_schemas(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_rollback_schema(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_rollback_schema(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_validate_message(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_validate_message(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_validate_schema(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_validate_schema(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = SchemaServiceRestTransport(interceptor=MyCustomSchemaServiceInterceptor())
+        client = SchemaServiceClient(transport=transport)
+
+
+    """
+
+    def pre_commit_schema(
+        self,
+        request: gp_schema.CommitSchemaRequest,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]],
+    ) -> Tuple[gp_schema.CommitSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]:
+        """Pre-rpc interceptor for commit_schema
+
+        Override in a subclass to manipulate the request or metadata
+        before they are sent to the SchemaService server.
+        """
+        return request, metadata
+
+    def post_commit_schema(self, response: gp_schema.Schema) -> gp_schema.Schema:
+        """Post-rpc interceptor for commit_schema
+
+        DEPRECATED. Please use the `post_commit_schema_with_metadata`
+        interceptor instead.
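A concrete (and hypothetical) interceptor built on the pattern documented above; the header name is invented for illustration:

```python
from google.pubsub_v1.services.schema_service import SchemaServiceClient
from google.pubsub_v1.services.schema_service.transports.rest import (
    SchemaServiceRestInterceptor,
    SchemaServiceRestTransport,
)


class TracingInterceptor(SchemaServiceRestInterceptor):
    def pre_create_schema(self, request, metadata):
        # Append request metadata before the call leaves the transport.
        return request, list(metadata) + [("x-example-trace", "demo")]  # assumed header

    def post_create_schema_with_metadata(self, response, metadata):
        # Inspect the parsed Schema and the HTTP response headers.
        return response, metadata


client = SchemaServiceClient(
    transport=SchemaServiceRestTransport(interceptor=TracingInterceptor())
)
```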
+ + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_commit_schema` interceptor runs + before the `post_commit_schema_with_metadata` interceptor. + """ + return response + + def post_commit_schema_with_metadata( + self, + response: gp_schema.Schema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gp_schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for commit_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_commit_schema_with_metadata` + interceptor in new development instead of the `post_commit_schema` interceptor. + When both interceptors are used, this `post_commit_schema_with_metadata` interceptor runs after the + `post_commit_schema` interceptor. The (possibly modified) response returned by + `post_commit_schema` will be passed to + `post_commit_schema_with_metadata`. + """ + return response, metadata + + def pre_create_schema( + self, + request: gp_schema.CreateSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gp_schema.CreateSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_create_schema(self, response: gp_schema.Schema) -> gp_schema.Schema: + """Post-rpc interceptor for create_schema + + DEPRECATED. Please use the `post_create_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_create_schema` interceptor runs + before the `post_create_schema_with_metadata` interceptor. + """ + return response + + def post_create_schema_with_metadata( + self, + response: gp_schema.Schema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gp_schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_create_schema_with_metadata` + interceptor in new development instead of the `post_create_schema` interceptor. + When both interceptors are used, this `post_create_schema_with_metadata` interceptor runs after the + `post_create_schema` interceptor. The (possibly modified) response returned by + `post_create_schema` will be passed to + `post_create_schema_with_metadata`. + """ + return response, metadata + + def pre_delete_schema( + self, + request: schema.DeleteSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.DeleteSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. 
+ """ + return request, metadata + + def pre_delete_schema_revision( + self, + request: schema.DeleteSchemaRevisionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema.DeleteSchemaRevisionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_schema_revision + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_delete_schema_revision(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for delete_schema_revision + + DEPRECATED. Please use the `post_delete_schema_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_delete_schema_revision` interceptor runs + before the `post_delete_schema_revision_with_metadata` interceptor. + """ + return response + + def post_delete_schema_revision_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_schema_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_delete_schema_revision_with_metadata` + interceptor in new development instead of the `post_delete_schema_revision` interceptor. + When both interceptors are used, this `post_delete_schema_revision_with_metadata` interceptor runs after the + `post_delete_schema_revision` interceptor. The (possibly modified) response returned by + `post_delete_schema_revision` will be passed to + `post_delete_schema_revision_with_metadata`. + """ + return response, metadata + + def pre_get_schema( + self, + request: schema.GetSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.GetSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_get_schema(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for get_schema + + DEPRECATED. Please use the `post_get_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_get_schema` interceptor runs + before the `post_get_schema_with_metadata` interceptor. + """ + return response + + def post_get_schema_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_get_schema_with_metadata` + interceptor in new development instead of the `post_get_schema` interceptor. + When both interceptors are used, this `post_get_schema_with_metadata` interceptor runs after the + `post_get_schema` interceptor. 
The (possibly modified) response returned by + `post_get_schema` will be passed to + `post_get_schema_with_metadata`. + """ + return response, metadata + + def pre_list_schema_revisions( + self, + request: schema.ListSchemaRevisionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema.ListSchemaRevisionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_schema_revisions + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_schema_revisions( + self, response: schema.ListSchemaRevisionsResponse + ) -> schema.ListSchemaRevisionsResponse: + """Post-rpc interceptor for list_schema_revisions + + DEPRECATED. Please use the `post_list_schema_revisions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_list_schema_revisions` interceptor runs + before the `post_list_schema_revisions_with_metadata` interceptor. + """ + return response + + def post_list_schema_revisions_with_metadata( + self, + response: schema.ListSchemaRevisionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema.ListSchemaRevisionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_schema_revisions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_list_schema_revisions_with_metadata` + interceptor in new development instead of the `post_list_schema_revisions` interceptor. + When both interceptors are used, this `post_list_schema_revisions_with_metadata` interceptor runs after the + `post_list_schema_revisions` interceptor. The (possibly modified) response returned by + `post_list_schema_revisions` will be passed to + `post_list_schema_revisions_with_metadata`. + """ + return response, metadata + + def pre_list_schemas( + self, + request: schema.ListSchemasRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ListSchemasRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_schemas + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_schemas( + self, response: schema.ListSchemasResponse + ) -> schema.ListSchemasResponse: + """Post-rpc interceptor for list_schemas + + DEPRECATED. Please use the `post_list_schemas_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_list_schemas` interceptor runs + before the `post_list_schemas_with_metadata` interceptor. + """ + return response + + def post_list_schemas_with_metadata( + self, + response: schema.ListSchemasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ListSchemasResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_schemas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. 
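The ordering contract spelled out above, condensed into a sketch: for a single RPC the transport calls the plain post-interceptor first and feeds its return value to the ``_with_metadata`` variant:

```python
from google.pubsub_v1.services.schema_service.transports.rest import (
    SchemaServiceRestInterceptor,
)


class AuditInterceptor(SchemaServiceRestInterceptor):
    def post_get_schema(self, response):
        # Runs first; may replace or mutate the parsed Schema.
        return response

    def post_get_schema_with_metadata(self, response, metadata):
        # Receives whatever post_get_schema returned, plus the HTTP
        # response headers as (key, value) metadata pairs.
        headers = dict(metadata)
        print(headers.get("content-type"))
        return response, metadata
```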
+ + We recommend only using this `post_list_schemas_with_metadata` + interceptor in new development instead of the `post_list_schemas` interceptor. + When both interceptors are used, this `post_list_schemas_with_metadata` interceptor runs after the + `post_list_schemas` interceptor. The (possibly modified) response returned by + `post_list_schemas` will be passed to + `post_list_schemas_with_metadata`. + """ + return response, metadata + + def pre_rollback_schema( + self, + request: schema.RollbackSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.RollbackSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for rollback_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_rollback_schema(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for rollback_schema + + DEPRECATED. Please use the `post_rollback_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_rollback_schema` interceptor runs + before the `post_rollback_schema_with_metadata` interceptor. + """ + return response + + def post_rollback_schema_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_rollback_schema_with_metadata` + interceptor in new development instead of the `post_rollback_schema` interceptor. + When both interceptors are used, this `post_rollback_schema_with_metadata` interceptor runs after the + `post_rollback_schema` interceptor. The (possibly modified) response returned by + `post_rollback_schema` will be passed to + `post_rollback_schema_with_metadata`. + """ + return response, metadata + + def pre_validate_message( + self, + request: schema.ValidateMessageRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ValidateMessageRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for validate_message + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_validate_message( + self, response: schema.ValidateMessageResponse + ) -> schema.ValidateMessageResponse: + """Post-rpc interceptor for validate_message + + DEPRECATED. Please use the `post_validate_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_validate_message` interceptor runs + before the `post_validate_message_with_metadata` interceptor. 
+ """ + return response + + def post_validate_message_with_metadata( + self, + response: schema.ValidateMessageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ValidateMessageResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for validate_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_validate_message_with_metadata` + interceptor in new development instead of the `post_validate_message` interceptor. + When both interceptors are used, this `post_validate_message_with_metadata` interceptor runs after the + `post_validate_message` interceptor. The (possibly modified) response returned by + `post_validate_message` will be passed to + `post_validate_message_with_metadata`. + """ + return response, metadata + + def pre_validate_schema( + self, + request: gp_schema.ValidateSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gp_schema.ValidateSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for validate_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_validate_schema( + self, response: gp_schema.ValidateSchemaResponse + ) -> gp_schema.ValidateSchemaResponse: + """Post-rpc interceptor for validate_schema + + DEPRECATED. Please use the `post_validate_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. This `post_validate_schema` interceptor runs + before the `post_validate_schema_with_metadata` interceptor. + """ + return response + + def post_validate_schema_with_metadata( + self, + response: gp_schema.ValidateSchemaResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gp_schema.ValidateSchemaResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for validate_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_validate_schema_with_metadata` + interceptor in new development instead of the `post_validate_schema` interceptor. + When both interceptors are used, this `post_validate_schema_with_metadata` interceptor runs after the + `post_validate_schema` interceptor. The (possibly modified) response returned by + `post_validate_schema` will be passed to + `post_validate_schema_with_metadata`. + """ + return response, metadata + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SchemaServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SchemaServiceRestInterceptor + + +class SchemaServiceRestTransport(_BaseSchemaServiceRestTransport): + """REST backend synchronous transport for SchemaService. + + Service for doing schema-related operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SchemaServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SchemaServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CommitSchema( + _BaseSchemaServiceRestTransport._BaseCommitSchema, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.CommitSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gp_schema.CommitSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.Schema: + r"""Call the commit schema method over HTTP. + + Args: + request (~.gp_schema.CommitSchemaRequest): + The request object. Request for CommitSchema method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gp_schema.Schema: + A schema resource. 
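One use of ``url_scheme`` described above is pointing the transport at a plain-HTTP test server; a sketch assuming a hypothetical local endpoint on port 8085 and anonymous credentials:

```python
from google.auth.credentials import AnonymousCredentials
from google.pubsub_v1.services.schema_service import SchemaServiceClient
from google.pubsub_v1.services.schema_service.transports.rest import (
    SchemaServiceRestTransport,
)

# "localhost:8085" is an assumed local fake server, not a documented endpoint.
transport = SchemaServiceRestTransport(
    host="localhost:8085",
    url_scheme="http",
    credentials=AnonymousCredentials(),
)
client = SchemaServiceClient(transport=transport)
```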
+ """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseCommitSchema._get_http_options() + ) + + request, metadata = self._interceptor.pre_commit_schema(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseCommitSchema._get_transcoded_request( + http_options, request + ) + + body = _BaseSchemaServiceRestTransport._BaseCommitSchema._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseCommitSchema._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.CommitSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "CommitSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._CommitSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gp_schema.Schema() + pb_resp = gp_schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_commit_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_commit_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gp_schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.commit_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "CommitSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSchema( + _BaseSchemaServiceRestTransport._BaseCreateSchema, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.CreateSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gp_schema.CreateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), + ) -> gp_schema.Schema: + r"""Call the create schema method over HTTP. + + Args: + request (~.gp_schema.CreateSchemaRequest): + The request object. Request for the CreateSchema method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gp_schema.Schema: + A schema resource. + """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseCreateSchema._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_schema(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseCreateSchema._get_transcoded_request( + http_options, request + ) + + body = _BaseSchemaServiceRestTransport._BaseCreateSchema._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseCreateSchema._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.CreateSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "CreateSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._CreateSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = gp_schema.Schema()
+            pb_resp = gp_schema.Schema.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_create_schema(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_create_schema_with_metadata(
+                resp, response_metadata
+            )
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                try:
+                    response_payload = gp_schema.Schema.to_json(resp)
+                except:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.pubsub_v1.SchemaServiceClient.create_schema",
+                    extra={
+                        "serviceName": "google.pubsub.v1.SchemaService",
+                        "rpcName": "CreateSchema",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _DeleteSchema(
+        _BaseSchemaServiceRestTransport._BaseDeleteSchema, SchemaServiceRestStub
+    ):
+        def __hash__(self):
+            return hash("SchemaServiceRestTransport.DeleteSchema")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+            return response
+
+        def __call__(
+            self,
+            request: schema.DeleteSchemaRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ):
+            r"""Call the delete schema method over HTTP.
+
+            Args:
+                request (~.schema.DeleteSchemaRequest):
+                    The request object. Request for the ``DeleteSchema`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
+ """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_schema(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.DeleteSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "DeleteSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._DeleteSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSchemaRevision( + _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.DeleteSchemaRevision") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: schema.DeleteSchemaRevisionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Call the delete schema revision method over HTTP. + + Args: + request (~.schema.DeleteSchemaRevisionRequest): + The request object. Request for the ``DeleteSchemaRevision`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.schema.Schema: + A schema resource. 
+ """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_schema_revision( + request, metadata + ) + transcoded_request = _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.DeleteSchemaRevision", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "DeleteSchemaRevision", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._DeleteSchemaRevision._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_schema_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_schema_revision_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.delete_schema_revision", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "DeleteSchemaRevision", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSchema( + _BaseSchemaServiceRestTransport._BaseGetSchema, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.GetSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: schema.GetSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Call 
the get schema method over HTTP. + + Args: + request (~.schema.GetSchemaRequest): + The request object. Request for the GetSchema method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.schema.Schema: + A schema resource. + """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_schema(request, metadata) + transcoded_request = ( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.GetSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "GetSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._GetSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
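+            # Editorial note: from_http_response() maps the status code to the
+            # matching typed exception (e.g. 404 -> NotFound, 403 ->
+            # PermissionDenied), so callers never need to inspect raw statuses.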
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.get_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "GetSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSchemaRevisions( + _BaseSchemaServiceRestTransport._BaseListSchemaRevisions, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.ListSchemaRevisions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: schema.ListSchemaRevisionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.ListSchemaRevisionsResponse: + r"""Call the list schema revisions method over HTTP. + + Args: + request (~.schema.ListSchemaRevisionsRequest): + The request object. Request for the ``ListSchemaRevisions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.schema.ListSchemaRevisionsResponse: + Response for the ``ListSchemaRevisions`` method. 
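+
+            Example (editorial sketch; the public client wraps this raw
+            response in a pager that keeps calling until ``next_page_token``
+            is empty)::
+
+                for revision in client.list_schema_revisions(
+                    name="projects/my-project/schemas/my-schema"
+                ):
+                    print(revision.revision_id)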
+ """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_schema_revisions( + request, metadata + ) + transcoded_request = _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ListSchemaRevisions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemaRevisions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._ListSchemaRevisions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.ListSchemaRevisionsResponse() + pb_resp = schema.ListSchemaRevisionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_schema_revisions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_schema_revisions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.ListSchemaRevisionsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.list_schema_revisions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemaRevisions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSchemas( + _BaseSchemaServiceRestTransport._BaseListSchemas, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.ListSchemas") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: schema.ListSchemasRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.ListSchemasResponse: + r"""Call the list schemas method over HTTP. + + Args: + request (~.schema.ListSchemasRequest): + The request object. Request for the ``ListSchemas`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.schema.ListSchemasResponse: + Response for the ``ListSchemas`` method. + """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseListSchemas._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_schemas(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseListSchemas._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseSchemaServiceRestTransport._BaseListSchemas._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ListSchemas", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemas", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._ListSchemas._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
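+            # Editorial note: the Parse call below passes
+            # ignore_unknown_fields=True, so fields added by newer server
+            # versions are dropped instead of breaking older clients.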
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.ListSchemasResponse() + pb_resp = schema.ListSchemasResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_schemas(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_schemas_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.ListSchemasResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.list_schemas", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemas", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _RollbackSchema( + _BaseSchemaServiceRestTransport._BaseRollbackSchema, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.RollbackSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: schema.RollbackSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.Schema: + r"""Call the rollback schema method over HTTP. + + Args: + request (~.schema.RollbackSchemaRequest): + The request object. Request for the ``RollbackSchema`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.schema.Schema: + A schema resource. 
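+
+            Example (editorial sketch; assumes ``client`` and a known
+            revision id)::
+
+                schema = client.rollback_schema(
+                    name="projects/my-project/schemas/my-schema",
+                    revision_id="<revision-id>",
+                )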
+ """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_http_options() + ) + + request, metadata = self._interceptor.pre_rollback_schema(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_transcoded_request( + http_options, request + ) + + body = _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.RollbackSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "RollbackSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._RollbackSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_rollback_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rollback_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.rollback_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "RollbackSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ValidateMessage( + _BaseSchemaServiceRestTransport._BaseValidateMessage, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.ValidateMessage") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: schema.ValidateMessageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> schema.ValidateMessageResponse: + r"""Call the validate message method over HTTP. + + Args: + request (~.schema.ValidateMessageRequest): + The request object. Request for the ``ValidateMessage`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.schema.ValidateMessageResponse: + Response for the ``ValidateMessage`` method. Empty for + now. + + """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseValidateMessage._get_http_options() + ) + + request, metadata = self._interceptor.pre_validate_message( + request, metadata + ) + transcoded_request = _BaseSchemaServiceRestTransport._BaseValidateMessage._get_transcoded_request( + http_options, request + ) + + body = _BaseSchemaServiceRestTransport._BaseValidateMessage._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseValidateMessage._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ValidateMessage", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ValidateMessage", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._ValidateMessage._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
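+            # Editorial note: when the message fails validation the server
+            # replies with HTTP 400, which from_http_response() surfaces as
+            # core_exceptions.InvalidArgument; a 2xx response means it passed.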
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.ValidateMessageResponse() + pb_resp = schema.ValidateMessageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_validate_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_message_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.ValidateMessageResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.validate_message", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ValidateMessage", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ValidateSchema( + _BaseSchemaServiceRestTransport._BaseValidateSchema, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.ValidateSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gp_schema.ValidateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Call the validate schema method over HTTP. + + Args: + request (~.gp_schema.ValidateSchemaRequest): + The request object. Request for the ``ValidateSchema`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gp_schema.ValidateSchemaResponse: + Response for the ``ValidateSchema`` method. Empty for + now. 
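+
+            Example (editorial sketch; assumes ``client`` and an Avro
+            definition string ``avro_json``)::
+
+                from google.pubsub_v1.types import Schema
+
+                client.validate_schema(
+                    parent="projects/my-project",
+                    schema=Schema(type_=Schema.Type.AVRO, definition=avro_json),
+                )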
+ + """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseValidateSchema._get_http_options() + ) + + request, metadata = self._interceptor.pre_validate_schema(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseValidateSchema._get_transcoded_request( + http_options, request + ) + + body = _BaseSchemaServiceRestTransport._BaseValidateSchema._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseValidateSchema._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ValidateSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ValidateSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._ValidateSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gp_schema.ValidateSchemaResponse() + pb_resp = gp_schema.ValidateSchemaResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_validate_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gp_schema.ValidateSchemaResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.validate_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ValidateSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def commit_schema( + self, + ) -> Callable[[gp_schema.CommitSchemaRequest], gp_schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CommitSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_schema( + self, + ) -> Callable[[gp_schema.CreateSchemaRequest], gp_schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_schema_revision( + self, + ) -> Callable[[schema.DeleteSchemaRevisionRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSchemaRevision(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_schema(self) -> Callable[[schema.GetSchemaRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], schema.ListSchemaRevisionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSchemaRevisions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_schemas( + self, + ) -> Callable[[schema.ListSchemasRequest], schema.ListSchemasResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSchemas(self._session, self._host, self._interceptor) # type: ignore + + @property + def rollback_schema( + self, + ) -> Callable[[schema.RollbackSchemaRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RollbackSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate_message( + self, + ) -> Callable[[schema.ValidateMessageRequest], schema.ValidateMessageResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ValidateMessage(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate_schema( + self, + ) -> Callable[[gp_schema.ValidateSchemaRequest], gp_schema.ValidateSchemaResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ValidateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseSchemaServiceRestTransport._BaseGetIamPolicy, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
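+            # Editorial note: unlike the schema RPCs above, the IAM mixin uses
+            # plain protobuf messages (policy_pb2), so the body is parsed with
+            # json_format.Parse directly rather than through a proto-plus
+            # .pb() wrapper.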
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseSchemaServiceRestTransport._BaseSetIamPolicy, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
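+
+            Example (editorial sketch of the usual read-modify-write flow;
+            ``client`` and ``resource`` are assumed to exist)::
+
+                policy = client.get_iam_policy(request={"resource": resource})
+                policy.bindings.add(
+                    role="roles/pubsub.viewer",
+                    members=["user:alice@example.com"],
+                )
+                client.set_iam_policy(
+                    request={"resource": resource, "policy": policy}
+                )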
+ """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseSchemaServiceRestTransport._BaseTestIamPermissions, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SchemaServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
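+            # Editorial note: a successful response echoes back only the
+            # subset of requested permissions the caller actually holds;
+            # missing permissions are omitted, not raised as errors.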
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SchemaServiceRestTransport",) diff --git a/google/pubsub_v1/services/schema_service/transports/rest_base.py b/google/pubsub_v1/services/schema_service/transports/rest_base.py new file mode 100644 index 000000000..0ce5285bd --- /dev/null +++ b/google/pubsub_v1/services/schema_service/transports/rest_base.py @@ -0,0 +1,746 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + + +class _BaseSchemaServiceRestTransport(SchemaServiceTransport): + """Base REST backend transport for SchemaService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. 
These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you are developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                    "https", but for testing or local servers,
+                    "http" can be specified.
+        """
+        # Run the base constructor
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+
+    class _BaseCommitSchema:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{name=projects/*/schemas/*}:commit",
+                    "body": "*",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = gp_schema.CommitSchemaRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = json_format.MessageToJson(
+                transcoded_request["body"], use_integers_for_enums=True
+            )
+            return body
+
+        @staticmethod
+        def _get_query_params_json(transcoded_request):
+            query_params = json.loads(
+                json_format.MessageToJson(
+                    transcoded_request["query_params"],
+                    use_integers_for_enums=True,
+                )
+            )
+            query_params.update(
+                _BaseSchemaServiceRestTransport._BaseCommitSchema._get_unset_required_fields(
+                    query_params
+                )
+            )
+
+            query_params["$alt"] = "json;enum-encoding=int"
+            return query_params
+
+    class _BaseCreateSchema:
+        def __hash__(self):  # pragma: NO COVER
+            return NotImplementedError("__hash__ must be implemented.")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        @staticmethod
+        def _get_http_options():
+            http_options: List[Dict[str, str]] = [
+                {
+                    "method": "post",
+                    "uri": "/v1/{parent=projects/*}/schemas",
+                    "body": "schema",
+                },
+            ]
+            return http_options
+
+        @staticmethod
+        def _get_transcoded_request(http_options, request):
+            pb_request = gp_schema.CreateSchemaRequest.pb(request)
+            transcoded_request = path_template.transcode(http_options, pb_request)
+            return transcoded_request
+
+        @staticmethod
+        def _get_request_body_json(transcoded_request):
+            # Jsonify the request body
+
+            body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseCreateSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/schemas/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.DeleteSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSchemaRevision: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/schemas/*}:deleteRevision", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.DeleteSchemaRevisionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/schemas/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
schema.GetSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSchemaRevisions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/schemas/*}:listRevisions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.ListSchemaRevisionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSchemas: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/schemas", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.ListSchemasRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseListSchemas._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRollbackSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/schemas/*}:rollback", + "body": "*", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = schema.RollbackSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseValidateMessage: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas:validateMessage", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.ValidateMessageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseValidateMessage._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseValidateSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas:validate", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gp_schema.ValidateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseSchemaServiceRestTransport._BaseValidateSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseSchemaServiceRestTransport",) diff --git a/google/pubsub_v1/services/subscriber/__init__.py b/google/pubsub_v1/services/subscriber/__init__.py new file mode 100644 index 000000000..0e651adb7 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import SubscriberClient +from .async_client import SubscriberAsyncClient + +__all__ = ( + "SubscriberClient", + "SubscriberAsyncClient", +) diff --git a/google/pubsub_v1/services/subscriber/async_client.py b/google/pubsub_v1/services/subscriber/async_client.py new file mode 100644 index 000000000..34843a0e0 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/async_client.py @@ -0,0 +1,2645 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
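Before the new subscriber files, note how the ``_Base*`` hooks defined above for the schema REST transport compose: each RPC gets static helpers that yield its HTTP options, a transcoded request, query params and, for the POST-style RPCs, a JSON body. A minimal sketch of wiring them into one call, assuming a hypothetical authorized requests-style ``session`` and ``host`` string (neither is part of this diff):

```python
# Sketch only. Composes the _BaseDeleteSchema hooks from the generated file
# above into a single REST call; `session` (an authorized requests-style
# session) and `host` are assumed inputs, not part of the generated code.
from google.pubsub_v1.services.schema_service.transports.rest_base import (
    _BaseSchemaServiceRestTransport,
)
from google.pubsub_v1.types import schema


def delete_schema_via_rest(session, host: str, request: schema.DeleteSchemaRequest):
    base = _BaseSchemaServiceRestTransport._BaseDeleteSchema
    http_options = base._get_http_options()
    # path_template.transcode matches the request against the http rule and
    # splits it into method / uri / query_params (plus body, when declared).
    transcoded = base._get_transcoded_request(http_options, request)
    query_params = base._get_query_params_json(transcoded)
    return session.request(
        transcoded["method"].upper(),
        f"https://{host}{transcoded['uri']}",
        params=query_params,
    )
```

RPCs whose http options declare a ``"body"`` (CreateSchema, RollbackSchema and the two validate calls) would additionally pass ``base._get_request_body_json(transcoded)`` as the request payload.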
+# +import logging as std_logging +from collections import OrderedDict +import re +from typing import ( + Dict, + Callable, + Mapping, + MutableMapping, + MutableSequence, + Optional, + AsyncIterable, + Awaitable, + AsyncIterator, + Sequence, + Tuple, + Type, + Union, +) + +import warnings +from google.pubsub_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.types import pubsub +from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport +from .client import SubscriberClient + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class SubscriberAsyncClient: + """The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + """ + + _client: SubscriberClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
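The note above deprecates the fixed DEFAULT_ENDPOINT in favor of the endpoint template copied from the synchronous client. A rough illustration of why, assuming the template and universe values that SubscriberClient conventionally defines (they are not shown in this hunk):

```python
# Assumed values, mirroring what the synchronous SubscriberClient typically
# defines; used here only to show how the template resolves.
_DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}"
_DEFAULT_UNIVERSE = "googleapis.com"

# One template serves any universe domain, which a fixed endpoint string cannot.
print(_DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN=_DEFAULT_UNIVERSE))
# -> pubsub.googleapis.com
```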
+ DEFAULT_ENDPOINT = SubscriberClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SubscriberClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SubscriberClient._DEFAULT_UNIVERSE + + listing_path = staticmethod(SubscriberClient.listing_path) + parse_listing_path = staticmethod(SubscriberClient.parse_listing_path) + snapshot_path = staticmethod(SubscriberClient.snapshot_path) + parse_snapshot_path = staticmethod(SubscriberClient.parse_snapshot_path) + subscription_path = staticmethod(SubscriberClient.subscription_path) + parse_subscription_path = staticmethod(SubscriberClient.parse_subscription_path) + topic_path = staticmethod(SubscriberClient.topic_path) + parse_topic_path = staticmethod(SubscriberClient.parse_topic_path) + common_billing_account_path = staticmethod( + SubscriberClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SubscriberClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(SubscriberClient.common_folder_path) + parse_common_folder_path = staticmethod(SubscriberClient.parse_common_folder_path) + common_organization_path = staticmethod(SubscriberClient.common_organization_path) + parse_common_organization_path = staticmethod( + SubscriberClient.parse_common_organization_path + ) + common_project_path = staticmethod(SubscriberClient.common_project_path) + parse_common_project_path = staticmethod(SubscriberClient.parse_common_project_path) + common_location_path = staticmethod(SubscriberClient.common_location_path) + parse_common_location_path = staticmethod( + SubscriberClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberAsyncClient: The constructed client. + """ + return SubscriberClient.from_service_account_info.__func__(SubscriberAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberAsyncClient: The constructed client. + """ + return SubscriberClient.from_service_account_file.__func__(SubscriberAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one.
+ (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SubscriberClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SubscriberTransport: + """Returns the transport used by the client instance. + + Returns: + SubscriberTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = SubscriberClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, SubscriberTransport, Callable[..., SubscriberTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the subscriber async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,SubscriberTransport,Callable[..., SubscriberTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SubscriberTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which can have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport.
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SubscriberClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SubscriberAsyncClient`.", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Subscriber", + "credentialsType": None, + }, + ) + + async def create_subscription( + self, + request: Optional[Union[pubsub.Subscription, dict]] = None, + *, + name: Optional[str] = None, + topic: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, + ack_deadline_seconds: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = await client.create_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.Subscription, dict]]): + The request object. A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. + name (:class:`str`): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (:class:`str`): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`google.pubsub_v1.types.PushConfig`): + Optional. If push delivery is used + with this subscription, this field is + used to configure it. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + Optional. The approximate amount of time (on a + best-effort basis) Pub/Sub waits for the subscriber to + acknowledge receipt before resending the message. In the + interval after the message is delivered and before it is + acknowledged, it is considered to be *outstanding*. + During that time period, the message will not be + redelivered (on a best-effort basis). + + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, topic, push_config, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.Subscription): + request = pubsub.Subscription(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_subscription + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_subscription( + self, + request: Optional[Union[pubsub.GetSubscriptionRequest, dict]] = None, + *, + subscription: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.get_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]]): + The request object. Request for the GetSubscription + method. + subscription (:class:`str`): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.GetSubscriptionRequest): + request = pubsub.GetSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_subscription + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
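The generated sample above always builds a full request object; the flattened keyword form that get_subscription also accepts is equivalent. A sketch, with a hypothetical subscription name (mixing ``request`` with flattened arguments raises ValueError, per the has_flattened_params check in the method body):

```python
import asyncio

from google import pubsub_v1


async def main():
    client = pubsub_v1.SubscriberAsyncClient()
    # Flattened form: pass `subscription` directly instead of a
    # GetSubscriptionRequest; the client builds the request internally.
    sub = await client.get_subscription(
        subscription="projects/my-project/subscriptions/my-sub"  # hypothetical
    )
    print(sub.name, sub.topic)


asyncio.run(main())
```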
+ return response + + async def update_subscription( + self, + request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, + *, + subscription: Optional[pubsub.Subscription] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription by updating the + fields specified in the update mask. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = await client.update_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]]): + The request object. Request for the UpdateSubscription + method. + subscription (:class:`google.pubsub_v1.types.Subscription`): + Required. The updated subscription + object. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Indicates which fields in + the provided subscription to update. + Must be specified and non-empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateSubscriptionRequest): + request = pubsub.UpdateSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_subscription + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription.name", request.subscription.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_subscriptions( + self, + request: Optional[Union[pubsub.ListSubscriptionsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSubscriptionsAsyncPager: + r"""Lists matching subscriptions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]]): + The request object. Request for the ``ListSubscriptions`` method. + project (:class:`str`): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager: + Response for the ListSubscriptions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListSubscriptionsRequest): + request = pubsub.ListSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_subscriptions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubscriptionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_subscription( + self, + request: Optional[Union[pubsub.DeleteSubscriptionRequest, dict]] = None, + *, + subscription: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + await client.delete_subscription(request=request) + + Args: + request (Optional[Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]]): + The request object. Request for the DeleteSubscription + method. + subscription (:class:`str`): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteSubscriptionRequest): + request = pubsub.DeleteSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_subscription + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def modify_ack_deadline( + self, + request: Optional[Union[pubsub.ModifyAckDeadlineRequest, dict]] = None, + *, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, + ack_deadline_seconds: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ack_deadline_seconds=2066, + ) + + # Make the request + await client.modify_ack_deadline(request=request) + + Args: + request (Optional[Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]]): + The request object. Request for the ModifyAckDeadline + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`MutableSequence[str]`): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + Required. The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify in a single request is + 600 seconds (10 minutes). + + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, ack_ids, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ModifyAckDeadlineRequest): + request = pubsub.ModifyAckDeadlineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if subscription is not None: + request.subscription = subscription + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + if ack_ids: + request.ack_ids.extend(ack_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_ack_deadline + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def acknowledge( + self, + request: Optional[Union[pubsub.AcknowledgeRequest, dict]] = None, + *, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ) + + # Make the request + await client.acknowledge(request=request) + + Args: + request (Optional[Union[google.pubsub_v1.types.AcknowledgeRequest, dict]]): + The request object. Request for the Acknowledge method. + subscription (:class:`str`): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`MutableSequence[str]`): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. + + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, ack_ids] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.AcknowledgeRequest): + request = pubsub.AcknowledgeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if ack_ids: + request.ack_ids.extend(ack_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.acknowledge + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def pull( + self, + request: Optional[Union[pubsub.PullRequest, dict]] = None, + *, + subscription: Optional[str] = None, + return_immediately: Optional[bool] = None, + max_messages: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.PullResponse: + r"""Pulls messages from the server. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = await client.pull(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.PullRequest, dict]]): + The request object. Request for the ``Pull`` method. + subscription (:class:`str`): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + return_immediately (:class:`bool`): + Optional. If this field set to true, the system will + respond immediately even if it there are no messages + available to return in the ``Pull`` response. 
Otherwise, + the system may wait (for a bounded amount of time) until + at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (:class:`int`): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. + + This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.PullResponse: + Response for the Pull method. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, return_immediately, max_messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.PullRequest): + request = pubsub.PullRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + if request.return_immediately: + warnings.warn( + "The return_immediately flag is deprecated and should be set to False.", + category=DeprecationWarning, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.pull] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
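Since ``pull`` and ``acknowledge`` are normally used as a pair, here is a sketch of the unary receive loop they support (the subscription name is hypothetical and ``max_messages`` is arbitrary):

```python
import asyncio

from google import pubsub_v1


async def pull_and_ack():
    client = pubsub_v1.SubscriberAsyncClient()
    subscription = "projects/my-project/subscriptions/my-sub"  # hypothetical

    response = await client.pull(subscription=subscription, max_messages=10)
    for received in response.received_messages:
        print(received.message.data)

    # Ack only after processing succeeds; anything left unacked is
    # redelivered once its ack deadline lapses.
    if response.received_messages:
        await client.acknowledge(
            subscription=subscription,
            ack_ids=[r.ack_id for r in response.received_messages],
        )


asyncio.run(pull_and_ack())
```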
+ return response + + def streaming_pull( + self, + requests: Optional[AsyncIterator[pubsub.StreamingPullRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgments and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_pull(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.pubsub_v1.types.StreamingPullRequest`]): + The request object AsyncIterator. Request for the ``StreamingPull`` streaming RPC method. + This request is used to establish the initial stream as + well as to stream acknowledgments and ack deadline + modifications from the client to the server. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.pubsub_v1.types.StreamingPullResponse]: + Response for the StreamingPull method. This response is used to stream + messages from the server to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.streaming_pull + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
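+        # Illustrative note (not generated code): each StreamingPullResponse
+        # yields received messages, while acknowledgments travel back on the
+        # request stream; a sketch is to have the request generator yield
+        # follow-up requests such as:
+        #
+        #     pubsub_v1.StreamingPullRequest(ack_ids=[msg.ack_id])
+        #
+        # for messages observed while iterating `async for response in stream`.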
+ return response + + async def modify_push_config( + self, + request: Optional[Union[pubsub.ModifyPushConfigRequest, dict]] = None, + *, + subscription: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + await client.modify_push_config(request=request) + + Args: + request (Optional[Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]]): + The request object. Request for the ModifyPushConfig + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`google.pubsub_v1.types.PushConfig`): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, push_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.ModifyPushConfigRequest):
+            request = pubsub.ModifyPushConfigRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if subscription is not None:
+            request.subscription = subscription
+        if push_config is not None:
+            request.push_config = push_config
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.modify_push_config
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("subscription", request.subscription),)
+            ),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def get_snapshot(
+        self,
+        request: Optional[Union[pubsub.GetSnapshotRequest, dict]] = None,
+        *,
+        snapshot: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pubsub.Snapshot:
+        r"""Gets the configuration details of a snapshot. Snapshots are used
+        in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            async def sample_get_snapshot():
+                # Create a client
+                client = pubsub_v1.SubscriberAsyncClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.GetSnapshotRequest(
+                    snapshot="snapshot_value",
+                )
+
+                # Make the request
+                response = await client.get_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.pubsub_v1.types.GetSnapshotRequest, dict]]):
+                The request object. Request for the GetSnapshot method.
+            snapshot (:class:`str`):
+                Required. The name of the snapshot to get. Format is
+                ``projects/{project}/snapshots/{snap}``.
+
+                This corresponds to the ``snapshot`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.types.Snapshot:
+                A snapshot resource.
Snapshots are used in
+                [Seek](https://cloud.google.com/pubsub/docs/replay-overview)
+                operations, which allow you to manage message
+                acknowledgments in bulk. That is, you can set the
+                acknowledgment state of messages in an existing
+                subscription to the state captured by a snapshot.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [snapshot]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.GetSnapshotRequest):
+            request = pubsub.GetSnapshotRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if snapshot is not None:
+            request.snapshot = snapshot
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.get_snapshot
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_snapshots(
+        self,
+        request: Optional[Union[pubsub.ListSnapshotsRequest, dict]] = None,
+        *,
+        project: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pagers.ListSnapshotsAsyncPager:
+        r"""Lists the existing snapshots. Snapshots are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            async def sample_list_snapshots():
+                # Create a client
+                client = pubsub_v1.SubscriberAsyncClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.ListSnapshotsRequest(
+                    project="project_value",
+                )
+
+                # Make the request
+                page_result = client.list_snapshots(request=request)
+
+                # Handle the response
+                async for response in page_result:
+                    print(response)
+
+        Args:
+            request (Optional[Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]]):
+                The request object. Request for the ``ListSnapshots`` method.
+            project (:class:`str`):
+                Required. The name of the project in which to list
+                snapshots. Format is ``projects/{project-id}``.
+
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager:
+                Response for the ListSnapshots method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [project]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.ListSnapshotsRequest):
+            request = pubsub.ListSnapshotsRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.list_snapshots
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__aiter__` convenience method.
+        response = pagers.ListSnapshotsAsyncPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def create_snapshot(
+        self,
+        request: Optional[Union[pubsub.CreateSnapshotRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        subscription: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pubsub.Snapshot:
+        r"""Creates a snapshot from the requested subscription. Snapshots
+        are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+        If the snapshot already exists, returns ``ALREADY_EXISTS``. If
+        the requested subscription doesn't exist, returns ``NOT_FOUND``.
+        If the backlog in the subscription is too old -- and the
+        resulting snapshot would expire in less than 1 hour -- then
+        ``FAILED_PRECONDITION`` is returned.
See also the
+        ``Snapshot.expire_time`` field. If the name is not provided in
+        the request, the server will assign a random name for this
+        snapshot on the same project as the subscription, conforming to
+        the [resource name format]
+        (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names).
+        The generated name is populated in the returned Snapshot object.
+        Note that for REST API requests, you must specify a name in the
+        request.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            async def sample_create_snapshot():
+                # Create a client
+                client = pubsub_v1.SubscriberAsyncClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.CreateSnapshotRequest(
+                    name="name_value",
+                    subscription="subscription_value",
+                )
+
+                # Make the request
+                response = await client.create_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]]):
+                The request object. Request for the ``CreateSnapshot`` method.
+            name (:class:`str`):
+                Required. User-provided name for this snapshot. If the
+                name is not provided in the request, the server will
+                assign a random name for this snapshot on the same
+                project as the subscription. Note that for REST API
+                requests, you must specify a name. See the `resource
+                name
+                rules <https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names>`__.
+                Format is ``projects/{project}/snapshots/{snap}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            subscription (:class:`str`):
+                Required. The subscription whose backlog the snapshot
+                retains. Specifically, the created snapshot is
+                guaranteed to retain: (a) The existing backlog on the
+                subscription. More precisely, this is defined as the
+                messages in the subscription's backlog that are
+                unacknowledged upon the successful completion of the
+                ``CreateSnapshot`` request; as well as: (b) Any messages
+                published to the subscription's topic following the
+                successful completion of the CreateSnapshot request.
+                Format is ``projects/{project}/subscriptions/{sub}``.
+
+                This corresponds to the ``subscription`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.types.Snapshot:
+                A snapshot resource. Snapshots are used in
+                [Seek](https://cloud.google.com/pubsub/docs/replay-overview)
+                operations, which allow you to manage message
+                acknowledgments in bulk. That is, you can set the
+                acknowledgment state of messages in an existing
+                subscription to the state captured by a snapshot.
+
+        """
+        # Create or coerce a protobuf request object.
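+        # Illustrative note (not generated code): the two calling styles are
+        # mutually exclusive. For example, either of
+        #
+        #     await client.create_snapshot(
+        #         name="name_value", subscription="subscription_value"
+        #     )
+        #     await client.create_snapshot(
+        #         request=pubsub_v1.CreateSnapshotRequest(
+        #             name="name_value", subscription="subscription_value"
+        #         )
+        #     )
+        #
+        # is accepted, but combining `request=` with a flattened field raises
+        # the ValueError below.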
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name, subscription]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.CreateSnapshotRequest):
+            request = pubsub.CreateSnapshotRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+        if subscription is not None:
+            request.subscription = subscription
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._client._transport._wrapped_methods[
+            self._client._transport.create_snapshot
+        ]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def update_snapshot(
+        self,
+        request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None,
+        *,
+        snapshot: Optional[pubsub.Snapshot] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pubsub.Snapshot:
+        r"""Updates an existing snapshot by updating the fields specified in
+        the update mask. Snapshots are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            async def sample_update_snapshot():
+                # Create a client
+                client = pubsub_v1.SubscriberAsyncClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.UpdateSnapshotRequest(
+                )
+
+                # Make the request
+                response = await client.update_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]]):
+                The request object. Request for the UpdateSnapshot
+                method.
+            snapshot (:class:`google.pubsub_v1.types.Snapshot`):
+                Required. The updated snapshot
+                object.
+
+                This corresponds to the ``snapshot`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+                Required.
Indicates which fields in + the provided snapshot to update. Must be + specified and non-empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Snapshot: + A snapshot resource. Snapshots are used in + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [snapshot, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateSnapshotRequest): + request = pubsub.UpdateSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("snapshot.name", request.snapshot.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_snapshot( + self, + request: Optional[Union[pubsub.DeleteSnapshotRequest, dict]] = None, + *, + snapshot: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. 
After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + await client.delete_snapshot(request=request) + + Args: + request (Optional[Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]]): + The request object. Request for the ``DeleteSnapshot`` method. + snapshot (:class:`str`): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteSnapshotRequest): + request = pubsub.DeleteSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_snapshot + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
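+        # Illustrative note (not generated code): DeleteSnapshot has no
+        # response body, so a caller simply awaits completion, e.g.:
+        #
+        #     await client.delete_snapshot(
+        #         snapshot="projects/my-project/snapshots/my-snap"
+        #     )
+        #
+        # ("my-project"/"my-snap" are placeholder names.)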
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def seek( + self, + request: Optional[Union[pubsub.SeekRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.seek(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.SeekRequest, dict]]): + The request object. Request for the ``Seek`` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.SeekResponse: + Response for the Seek method (this response is empty). + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.SeekRequest): + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[self._client._transport.seek] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
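+        # Illustrative note (not generated code): a SeekRequest targets either
+        # a snapshot or a point in time, e.g.:
+        #
+        #     await client.seek(request=pubsub_v1.SeekRequest(
+        #         subscription="subscription_value",
+        #         snapshot="projects/my-project/snapshots/my-snap",  # or: time=...
+        #     ))
+        #
+        # ("my-project"/"my-snap" are placeholder names.)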
+        return response
+
+    async def set_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Sets the IAM access control policy on the specified function.
+
+        Replaces any existing policy.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+                The request object. Request message for `SetIamPolicy`
+                method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._client._validate_universe_domain()
+
+        # Send the request.
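+        # Illustrative note (not generated code): since the request is a raw
+        # protobuf type, a plain dict is also accepted and expanded above, e.g.:
+        #
+        #     policy = await client.set_iam_policy(request={
+        #         "resource": "projects/my-project/subscriptions/my-sub",
+        #         "policy": {"bindings": [{"role": "roles/pubsub.subscriber",
+        #                                  "members": ["user:eve@example.com"]}]},
+        #     })
+        #
+        # ("my-project"/"my-sub" are placeholder names.)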
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def __aenter__(self) -> "SubscriberAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("SubscriberAsyncClient",) diff --git a/google/pubsub_v1/services/subscriber/client.py b/google/pubsub_v1/services/subscriber/client.py new file mode 100644 index 000000000..2a946f726 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/client.py @@ -0,0 +1,3136 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from http import HTTPStatus
+import json
+import logging as std_logging
+import functools
+import os
+import re
+from typing import (
+    Dict,
+    Callable,
+    Mapping,
+    MutableMapping,
+    MutableSequence,
+    Optional,
+    Iterable,
+    Iterator,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+    cast,
+)
+import warnings
+
+from google.pubsub_v1 import gapic_version as package_version
+
+from google.api_core import client_options as client_options_lib
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+import google.protobuf
+
+try:
+    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None]
+except AttributeError:  # pragma: NO COVER
+    OptionalRetry = Union[retries.Retry, object, None]  # type: ignore
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import duration_pb2  # type: ignore
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from google.pubsub_v1.services.subscriber import pagers
+from google.pubsub_v1.types import pubsub
+
+import grpc
+from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import SubscriberGrpcTransport
+from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport
+from .transports.rest import SubscriberRestTransport
+
+
+class SubscriberClientMeta(type):
+    """Metaclass for the Subscriber client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[SubscriberTransport]]
+    _transport_registry["grpc"] = SubscriberGrpcTransport
+    _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport
+    _transport_registry["rest"] = SubscriberRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[SubscriberTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
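+        # Illustrative note (not generated code): given the registry above,
+        # SubscriberClient.get_transport_class("rest") returns
+        # SubscriberRestTransport, while calling it with no label returns the
+        # first registered transport (SubscriberGrpcTransport).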
+        return next(iter(cls._transport_registry.values()))
+
+
+class SubscriberClient(metaclass=SubscriberClientMeta):
+    """The service that an application uses to manipulate subscriptions and
+    to consume messages from a subscription via the ``Pull`` method or
+    by establishing a bi-directional stream using the ``StreamingPull``
+    method.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead.
+
+    # The scopes needed to make gRPC calls to all of the methods defined in
+    # this service
+    _DEFAULT_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/pubsub",
+    )
+
+    SERVICE_ADDRESS = "pubsub.googleapis.com:443"
+    """The default address of the service."""
+
+    DEFAULT_ENDPOINT = "pubsub.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}"
+    _DEFAULT_UNIVERSE = "googleapis.com"
+
+    @staticmethod
+    def _use_client_cert_effective():
+        """Returns whether a client certificate should be used for mTLS.
+
+        If the installed google-auth version supports should_use_client_cert,
+        that automatic mTLS enablement check is used.
+
+        Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var.
+
+        Returns:
+            bool: whether client certificate should be used for mTLS
+        Raises:
+            ValueError: If using a version of google-auth without should_use_client_cert and
+                GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.
+        """
+        # check if google-auth version supports should_use_client_cert for automatic mTLS enablement
+        if hasattr(mtls, "should_use_client_cert"):  # pragma: NO COVER
+            return mtls.should_use_client_cert()
+        else:  # pragma: NO COVER
+            # if unsupported, fallback to reading from env var
+            use_client_cert_str = os.getenv(
+                "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"
+            ).lower()
+            if use_client_cert_str not in ("true", "false"):
+                raise ValueError(
+                    "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
+                    " either `true` or `false`"
+                )
+            return use_client_cert_str == "true"
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        info.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            SubscriberClient: The constructed client.
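+
+        Example (illustrative; ``key.json`` is a placeholder path)::
+
+            import json
+
+            with open("key.json") as f:
+                info = json.load(f)
+            client = SubscriberClient.from_service_account_info(info)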
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SubscriberTransport: + """Returns the transport used by the client instance. + + Returns: + SubscriberTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def listing_path( + project: str, + location: str, + data_exchange: str, + listing: str, + ) -> str: + """Returns a fully-qualified listing string.""" + return "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format( + project=project, + location=location, + data_exchange=data_exchange, + listing=listing, + ) + + @staticmethod + def parse_listing_path(path: str) -> Dict[str, str]: + """Parses a listing path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/dataExchanges/(?P.+?)/listings/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def snapshot_path( + project: str, + snapshot: str, + ) -> str: + """Returns a fully-qualified snapshot string.""" + return "projects/{project}/snapshots/{snapshot}".format( + project=project, + snapshot=snapshot, + ) + + @staticmethod + def parse_snapshot_path(path: str) -> Dict[str, str]: + """Parses a snapshot path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/snapshots/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def subscription_path( + project: str, + subscription: str, + ) -> str: + """Returns a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format( + project=project, + subscription=subscription, + ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str, str]: + """Parses a subscription path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + @staticmethod + def topic_path( + project: str, + topic: str, + ) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str, str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return 
m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = SubscriberClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = SubscriberClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SubscriberClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SubscriberClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SubscriberClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, SubscriberTransport, Callable[..., SubscriberTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the subscriber client. 
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Optional[Union[str,SubscriberTransport,Callable[..., SubscriberTransport]]]):
+                The transport to use, or a Callable that constructs and returns a new transport.
+                If a Callable is given, it will be called with the same set of initialization
+                arguments as used in the SubscriberTransport constructor.
+                If set to None, a transport is chosen automatically.
+            client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]):
+                Custom options for the client.
+
+                1. The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client when ``transport`` is
+                not explicitly provided. Only if this property is not set and
+                ``transport`` was not explicitly provided, the endpoint is
+                determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment
+                variable, which can have one of the following values:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto-switch to the
+                default mTLS endpoint if a client certificate is present; this is
+                the default value).
+
+                2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide a client certificate for mTLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+
+                3. The ``universe_domain`` property can be used to override the
+                default "googleapis.com" universe. Note that the ``api_endpoint``
+                property still takes precedence; and ``universe_domain`` is
+                currently not supported for mTLS.
+
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+        self._client_options = client_options
+        if isinstance(self._client_options, dict):
+            self._client_options = client_options_lib.from_dict(self._client_options)
+        if self._client_options is None:
+            self._client_options = client_options_lib.ClientOptions()
+        self._client_options = cast(
+            client_options_lib.ClientOptions, self._client_options
+        )
+
+        universe_domain_opt = getattr(self._client_options, "universe_domain", None)
+
+        (
+            self._use_client_cert,
+            self._use_mtls_endpoint,
+            self._universe_domain_env,
+        ) = SubscriberClient._read_environment_variables()
+        self._client_cert_source = SubscriberClient._get_client_cert_source(
+            self._client_options.client_cert_source, self._use_client_cert
+        )
+        self._universe_domain = SubscriberClient._get_universe_domain(
+            universe_domain_opt, self._universe_domain_env
+        )
+        self._api_endpoint = None  # updated below, depending on `transport`
+
+        # Initialize the universe domain validation.
+        self._is_universe_domain_valid = False
+
+        if CLIENT_LOGGING_SUPPORTED:  # pragma: NO COVER
+            # Setup logging.
+ client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, SubscriberTransport) + if transport_provided: + # transport is a SubscriberTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(SubscriberTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or SubscriberClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[SubscriberTransport], Callable[..., SubscriberTransport] + ] = ( + SubscriberClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SubscriberTransport], transport) + ) + # initialize with the provided callable or the passed in class + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + transport_init = functools.partial(transport_init, channel=channel) + + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SubscriberClient`.", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Subscriber", + "credentialsType": None, + }, + ) + + def create_subscription( + self, + request: Optional[Union[pubsub.Subscription, dict]] = None, + *, + name: Optional[str] = None, + topic: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, + ack_deadline_seconds: Optional[int] = None, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = client.create_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.Subscription, dict]): + The request object. A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. + name (str): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (str): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (google.pubsub_v1.types.PushConfig): + Optional. If push delivery is used + with this subscription, this field is + used to configure it. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (int): + Optional. The approximate amount of time (on a + best-effort basis) Pub/Sub waits for the subscriber to + acknowledge receipt before resending the message. In the + interval after the message is delivered and before it is + acknowledged, it is considered to be *outstanding*. 
+ During that time period, the message will not be + redelivered (on a best-effort basis). + + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, topic, push_config, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.Subscription): + request = pubsub.Subscription(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
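+        # The server may fill in defaults on the returned Subscription (for
+        # example, a generated name when none was supplied in the request),
+        # so prefer reading configuration back from the response.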
+ return response + + def get_subscription( + self, + request: Optional[Union[pubsub.GetSubscriptionRequest, dict]] = None, + *, + subscription: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): + The request object. Request for the GetSubscription + method. + subscription (str): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.GetSubscriptionRequest): + request = pubsub.GetSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. 
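+        # The routing header is sent as `x-goog-request-params`, which lets
+        # the backend route the call by resource name without inspecting the
+        # request body.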
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_subscription( + self, + request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, + *, + subscription: Optional[pubsub.Subscription] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription by updating the + fields specified in the update mask. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = client.update_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): + The request object. Request for the UpdateSubscription + method. + subscription (google.pubsub_v1.types.Subscription): + Required. The updated subscription + object. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in + the provided subscription to update. + Must be specified and non-empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
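+        # Passing both a `request` object and flattened arguments would be
+        # ambiguous, so the combination is rejected below.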
+ flattened_params = [subscription, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateSubscriptionRequest): + request = pubsub.UpdateSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription.name", request.subscription.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_subscriptions( + self, + request: Optional[Union[pubsub.ListSubscriptionsRequest, dict]] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListSubscriptionsPager: + r"""Lists matching subscriptions. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): + The request object. Request for the ``ListSubscriptions`` method. + project (str): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager: + Response for the ListSubscriptions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListSubscriptionsRequest): + request = pubsub.ListSubscriptionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubscriptionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_subscription( + self, + request: Optional[Union[pubsub.DeleteSubscriptionRequest, dict]] = None, + *, + subscription: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + client.delete_subscription(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): + The request object. 
Request for the DeleteSubscription + method. + subscription (str): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteSubscriptionRequest): + request = pubsub.DeleteSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def modify_ack_deadline( + self, + request: Optional[Union[pubsub.ModifyAckDeadlineRequest, dict]] = None, + *, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, + ack_deadline_seconds: Optional[int] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ack_deadline_seconds=2066, + ) + + # Make the request + client.modify_ack_deadline(request=request) + + Args: + request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): + The request object. Request for the ModifyAckDeadline + method. + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (MutableSequence[str]): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (int): + Required. The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify in a single request is + 600 seconds (10 minutes). + + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, ack_ids, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ModifyAckDeadlineRequest): + request = pubsub.ModifyAckDeadlineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
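+        # Only explicitly provided flattened arguments overwrite request
+        # fields; omitted arguments leave the proto defaults untouched.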
+ if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_ack_deadline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def acknowledge( + self, + request: Optional[Union[pubsub.AcknowledgeRequest, dict]] = None, + *, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ) + + # Make the request + client.acknowledge(request=request) + + Args: + request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): + The request object. Request for the Acknowledge method. + subscription (str): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (MutableSequence[str]): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. + + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. 
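+        # (Per the docstring above, acknowledging a message more than once is
+        # not an error, though an ack after the deadline has expired may still
+        # be followed by a redelivery.)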
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [subscription, ack_ids]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.AcknowledgeRequest):
+            request = pubsub.AcknowledgeRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if subscription is not None:
+            request.subscription = subscription
+        if ack_ids is not None:
+            request.ack_ids = ack_ids
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.acknowledge]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("subscription", request.subscription),)
+            ),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def pull(
+        self,
+        request: Optional[Union[pubsub.PullRequest, dict]] = None,
+        *,
+        subscription: Optional[str] = None,
+        return_immediately: Optional[bool] = None,
+        max_messages: Optional[int] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pubsub.PullResponse:
+        r"""Pulls messages from the server.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            def sample_pull():
+                # Create a client
+                client = pubsub_v1.SubscriberClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.PullRequest(
+                    subscription="subscription_value",
+                    max_messages=1277,
+                )
+
+                # Make the request
+                response = client.pull(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.pubsub_v1.types.PullRequest, dict]):
+                The request object. Request for the ``Pull`` method.
+            subscription (str):
+                Required. The subscription from which messages should be
+                pulled. Format is
+                ``projects/{project}/subscriptions/{sub}``.
+
+                This corresponds to the ``subscription`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            return_immediately (bool):
+                Optional. If this field is set to true, the system will
+                respond immediately even if there are no messages
+                available to return in the ``Pull`` response. Otherwise,
+                the system may wait (for a bounded amount of time) until
+                at least one message is available, rather than returning
+                no messages.
Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (int): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. + + This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.PullResponse: + Response for the Pull method. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, return_immediately, max_messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.PullRequest): + request = pubsub.PullRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + if request.return_immediately: + warnings.warn( + "The return_immediately flag is deprecated and should be set to False.", + category=DeprecationWarning, + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pull] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_pull( + self, + requests: Optional[Iterator[pubsub.StreamingPullRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[pubsub.StreamingPullResponse]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgments and ack + deadline modifications back to the server. 
The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_pull(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): + The request object iterator. Request for the ``StreamingPull`` streaming RPC method. + This request is used to establish the initial stream as + well as to stream acknowledgments and ack deadline + modifications from the client to the server. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.pubsub_v1.types.StreamingPullResponse]: + Response for the StreamingPull method. This response is used to stream + messages from the server to the client. + + """ + + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_pull] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def modify_push_config( + self, + request: Optional[Union[pubsub.ModifyPushConfigRequest, dict]] = None, + *, + subscription: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. 
+ + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + client.modify_push_config(request=request) + + Args: + request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): + The request object. Request for the ModifyPushConfig + method. + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (google.pubsub_v1.types.PushConfig): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [subscription, push_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ModifyPushConfigRequest): + request = pubsub.ModifyPushConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if push_config is not None: + request.push_config = push_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
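+        # _wrapped_methods carries the per-method retry/timeout defaults that
+        # the transport derives from the service configuration.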
+        rpc = self._transport._wrapped_methods[self._transport.modify_push_config]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (("subscription", request.subscription),)
+            ),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def get_snapshot(
+        self,
+        request: Optional[Union[pubsub.GetSnapshotRequest, dict]] = None,
+        *,
+        snapshot: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pubsub.Snapshot:
+        r"""Gets the configuration details of a snapshot. Snapshots are used
+        in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            def sample_get_snapshot():
+                # Create a client
+                client = pubsub_v1.SubscriberClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.GetSnapshotRequest(
+                    snapshot="snapshot_value",
+                )
+
+                # Make the request
+                response = client.get_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]):
+                The request object. Request for the GetSnapshot method.
+            snapshot (str):
+                Required. The name of the snapshot to get. Format is
+                ``projects/{project}/snapshots/{snap}``.
+
+                This corresponds to the ``snapshot`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.types.Snapshot:
+                A snapshot resource. Snapshots are used in
+                [Seek](https://cloud.google.com/pubsub/docs/replay-overview)
+                operations, which allow you to manage message
+                acknowledgments in bulk. That is, you can set the
+                acknowledgment state of messages in an existing
+                subscription to the state captured by a snapshot.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [snapshot]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.GetSnapshotRequest):
+            request = pubsub.GetSnapshotRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if snapshot is not None:
+            request.snapshot = snapshot
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_snapshot]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_snapshots(
+        self,
+        request: Optional[Union[pubsub.ListSnapshotsRequest, dict]] = None,
+        *,
+        project: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pagers.ListSnapshotsPager:
+        r"""Lists the existing snapshots. Snapshots are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            def sample_list_snapshots():
+                # Create a client
+                client = pubsub_v1.SubscriberClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.ListSnapshotsRequest(
+                    project="project_value",
+                )
+
+                # Make the request
+                page_result = client.list_snapshots(request=request)
+
+                # Handle the response
+                for response in page_result:
+                    print(response)
+
+        Args:
+            request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]):
+                The request object. Request for the ``ListSnapshots`` method.
+            project (str):
+                Required. The name of the project in which to list
+                snapshots. Format is ``projects/{project-id}``.
+
+                This corresponds to the ``project`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager:
+                Response for the ListSnapshots method.
+
+                Iterating over this object will yield results and
+                resolve additional pages automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [project]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.ListSnapshotsRequest):
+            request = pubsub.ListSnapshotsRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if project is not None:
+            request.project = project
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_snapshots]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListSnapshotsPager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_snapshot(
+        self,
+        request: Optional[Union[pubsub.CreateSnapshotRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        subscription: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pubsub.Snapshot:
+        r"""Creates a snapshot from the requested subscription. Snapshots
+        are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+        If the snapshot already exists, returns ``ALREADY_EXISTS``. If
+        the requested subscription doesn't exist, returns ``NOT_FOUND``.
+        If the backlog in the subscription is too old -- and the
+        resulting snapshot would expire in less than 1 hour -- then
+        ``FAILED_PRECONDITION`` is returned. See also the
+        ``Snapshot.expire_time`` field. If the name is not provided in
+        the request, the server will assign a random name for this
+        snapshot on the same project as the subscription, conforming to
+        the [resource name format]
+        (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names).
+        The generated name is populated in the returned Snapshot object.
+        Note that for REST API requests, you must specify a name in the
+        request.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            def sample_create_snapshot():
+                # Create a client
+                client = pubsub_v1.SubscriberClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.CreateSnapshotRequest(
+                    name="name_value",
+                    subscription="subscription_value",
+                )
+
+                # Make the request
+                response = client.create_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]):
+                The request object. Request for the ``CreateSnapshot`` method.
+            name (str):
+                Required. User-provided name for this snapshot. If the
+                name is not provided in the request, the server will
+                assign a random name for this snapshot on the same
+                project as the subscription. Note that for REST API
+                requests, you must specify a name. See the `resource
+                name
+                rules <https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names>`__.
+                Format is ``projects/{project}/snapshots/{snap}``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            subscription (str):
+                Required. The subscription whose backlog the snapshot
+                retains. Specifically, the created snapshot is
+                guaranteed to retain: (a) The existing backlog on the
+                subscription. More precisely, this is defined as the
+                messages in the subscription's backlog that are
+                unacknowledged upon the successful completion of the
+                ``CreateSnapshot`` request; as well as: (b) Any messages
+                published to the subscription's topic following the
+                successful completion of the CreateSnapshot request.
+                Format is ``projects/{project}/subscriptions/{sub}``.
+
+                This corresponds to the ``subscription`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.types.Snapshot:
+                A snapshot resource. Snapshots are used in
+                [Seek](https://cloud.google.com/pubsub/docs/replay-overview)
+                operations, which allow you to manage message
+                acknowledgments in bulk. That is, you can set the
+                acknowledgment state of messages in an existing
+                subscription to the state captured by a snapshot.
+
+        """
+        # Create or coerce a protobuf request object.
+        # - Quick check: If we got a request object, we should *not* have
+        #   gotten any keyword arguments that map to the request.
+        flattened_params = [name, subscription]
+        has_flattened_params = (
+            len([param for param in flattened_params if param is not None]) > 0
+        )
+        if request is not None and has_flattened_params:
+            raise ValueError(
+                "If the `request` argument is set, then none of "
+                "the individual field arguments should be set."
+            )
+
+        # - Use the request object if provided (there's no risk of modifying the input as
+        #   there are no flattened fields), or create one.
+        if not isinstance(request, pubsub.CreateSnapshotRequest):
+            request = pubsub.CreateSnapshotRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
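+        # Hedged, illustrative sketch (hypothetical names): the resulting
+        # snapshot retains the subscription's unacknowledged backlog plus any
+        # messages published after the call, and can later be replayed via
+        # seek(), e.g.:
+        #
+        #   snapshot = client.create_snapshot(
+        #       name="projects/my-project/snapshots/my-snap",
+        #       subscription="projects/my-project/subscriptions/my-sub",
+        #   )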
+        if name is not None:
+            request.name = name
+        if subscription is not None:
+            request.subscription = subscription
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_snapshot]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def update_snapshot(
+        self,
+        request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None,
+        *,
+        snapshot: Optional[pubsub.Snapshot] = None,
+        update_mask: Optional[field_mask_pb2.FieldMask] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> pubsub.Snapshot:
+        r"""Updates an existing snapshot by updating the fields specified in
+        the update mask. Snapshots are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            def sample_update_snapshot():
+                # Create a client
+                client = pubsub_v1.SubscriberClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.UpdateSnapshotRequest(
+                )
+
+                # Make the request
+                response = client.update_snapshot(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]):
+                The request object. Request for the UpdateSnapshot
+                method.
+            snapshot (google.pubsub_v1.types.Snapshot):
+                Required. The updated snapshot
+                object.
+
+                This corresponds to the ``snapshot`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            update_mask (google.protobuf.field_mask_pb2.FieldMask):
+                Required. Indicates which fields in
+                the provided snapshot to update. Must be
+                specified and non-empty.
+
+                This corresponds to the ``update_mask`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+
+        Returns:
+            google.pubsub_v1.types.Snapshot:
+                A snapshot resource. Snapshots are used in
+                [Seek](https://cloud.google.com/pubsub/docs/replay-overview)
+                operations, which allow you to manage message
+                acknowledgments in bulk.
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [snapshot, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateSnapshotRequest): + request = pubsub.UpdateSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("snapshot.name", request.snapshot.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_snapshot( + self, + request: Optional[Union[pubsub.DeleteSnapshotRequest, dict]] = None, + *, + snapshot: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + client.delete_snapshot(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): + The request object. Request for the ``DeleteSnapshot`` method. 
+ snapshot (str): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteSnapshotRequest): + request = pubsub.DeleteSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def seek( + self, + request: Optional[Union[pubsub.SeekRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.seek(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.SeekRequest, dict]): + The request object. Request for the ``Seek`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.pubsub_v1.types.SeekResponse: + Response for the Seek method (this response is empty). + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.SeekRequest): + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.seek] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "SubscriberClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. 
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.set_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        try:
+            # Send the request.
+            response = rpc(
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
+    def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                sent along with the request as metadata. Normally, each value must be of type `str`,
+                but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                be of type `bytes`.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_iam_policy]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        try:
+            # Send the request.
+            response = rpc(
+                request,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata,
+            )
+
+            # Done; return the response.
+            return response
+        except core_exceptions.GoogleAPICallError as e:
+            self._add_cred_info_for_auth_errors(e)
+            raise e
+
+    def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a function.
+
+        If the function does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object.
Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("SubscriberClient",) diff --git a/google/pubsub_v1/services/subscriber/pagers.py b/google/pubsub_v1/services/subscriber/pagers.py new file mode 100644 index 000000000..9f879cfc8 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/pagers.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Sequence, + Tuple, + Optional, + Iterator, + Union, +) + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.pubsub_v1.types import pubsub + + +class ListSubscriptionsPager: + """A pager for iterating through ``list_subscriptions`` requests. 
+ + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListSubscriptionsResponse], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSubscriptionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[pubsub.Subscription]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubscriptionsAsyncPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
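+
+    A minimal usage sketch (hedged; assumes an existing
+    ``SubscriberAsyncClient`` and illustrative resource names):
+
+    .. code-block:: python
+
+        pager = await client.list_subscriptions(project="projects/my-project")
+        async for subscription in pager:
+            print(subscription.name)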
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListSubscriptionsResponse]], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSubscriptionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[pubsub.Subscription]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSnapshotsPager: + """A pager for iterating through ``list_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListSnapshotsResponse], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSnapshotsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSnapshotsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[pubsub.Snapshot]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSnapshotsAsyncPager: + """A pager for iterating through ``list_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListSnapshotsResponse]], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSnapshotsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSnapshotsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
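+
+        A hedged sketch of page-wise iteration via the ``pages`` property
+        (resource names illustrative):
+
+        .. code-block:: python
+
+            pager = await client.list_snapshots(project="projects/my-project")
+            async for page in pager.pages:
+                print(len(page.snapshots))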
+ """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[pubsub.Snapshot]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/pubsub_v1/services/subscriber/transports/README.rst b/google/pubsub_v1/services/subscriber/transports/README.rst new file mode 100644 index 000000000..2df98ffe6 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SubscriberTransport` is the ABC for all transports. +- public child `SubscriberGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SubscriberGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSubscriberRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `SubscriberRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/google/pubsub_v1/services/subscriber/transports/__init__.py b/google/pubsub_v1/services/subscriber/transports/__init__.py new file mode 100644 index 000000000..73e9fd44f --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SubscriberTransport +from .grpc import SubscriberGrpcTransport +from .grpc_asyncio import SubscriberGrpcAsyncIOTransport +from .rest import SubscriberRestTransport +from .rest import SubscriberRestInterceptor + + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] +_transport_registry["grpc"] = SubscriberGrpcTransport +_transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport +_transport_registry["rest"] = SubscriberRestTransport + +__all__ = ( + "SubscriberTransport", + "SubscriberGrpcTransport", + "SubscriberGrpcAsyncIOTransport", + "SubscriberRestTransport", + "SubscriberRestInterceptor", +) diff --git a/google/pubsub_v1/services/subscriber/transports/base.py b/google/pubsub_v1/services/subscriber/transports/base.py new file mode 100644 index 000000000..a25ff562f --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/base.py @@ -0,0 +1,590 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.pubsub_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SubscriberTransport(abc.ABC): + """Abstract transport class for Subscriber.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + DEFAULT_HOST: str = "pubsub.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
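+        # Illustrative note on the retry settings below: google.api_core
+        # retries back off exponentially, so initial=0.1 and multiplier=1.3
+        # give delays of roughly 0.1s, 0.13s, 0.169s, ... (jittered), capped
+        # at ``maximum``, until ``deadline`` elapses; only the exception types
+        # listed in ``predicate`` are retried.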
+ self._wrapped_methods = { + self.create_subscription: gapic_v1.method.wrap_method( + self.create_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_subscription: gapic_v1.method.wrap_method( + self.get_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_subscription: gapic_v1.method.wrap_method( + self.update_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_subscriptions: gapic_v1.method.wrap_method( + self.list_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_subscription: gapic_v1.method.wrap_method( + self.delete_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.modify_ack_deadline: gapic_v1.method.wrap_method( + self.modify_ack_deadline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.acknowledge: gapic_v1.method.wrap_method( + self.acknowledge, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.pull: gapic_v1.method.wrap_method( + self.pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.streaming_pull: gapic_v1.method.wrap_method( + self.streaming_pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=4, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.modify_push_config: gapic_v1.method.wrap_method( + self.modify_push_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + 
default_timeout=60.0, + client_info=client_info, + ), + self.get_snapshot: gapic_v1.method.wrap_method( + self.get_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_snapshots: gapic_v1.method.wrap_method( + self.list_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_snapshot: gapic_v1.method.wrap_method( + self.create_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_snapshot: gapic_v1.method.wrap_method( + self.update_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_snapshot: gapic_v1.method.wrap_method( + self.delete_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.seek: gapic_v1.method.wrap_method( + self.seek, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_subscription( + self, + ) -> Callable[ + [pubsub.Subscription], + Union[pubsub.Subscription, Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def get_subscription( + self, + ) -> Callable[ + [pubsub.GetSubscriptionRequest], + Union[pubsub.Subscription, Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def update_subscription( + self, + ) -> Callable[ + [pubsub.UpdateSubscriptionRequest], + Union[pubsub.Subscription, Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def list_subscriptions( + self, + ) -> Callable[ + [pubsub.ListSubscriptionsRequest], + Union[ + pubsub.ListSubscriptionsResponse, + Awaitable[pubsub.ListSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_subscription( + self, + ) -> Callable[ + [pubsub.DeleteSubscriptionRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def modify_ack_deadline( + self, + ) -> Callable[ + [pubsub.ModifyAckDeadlineRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def acknowledge( + self, + ) -> Callable[ + [pubsub.AcknowledgeRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] + ]: + raise NotImplementedError() + + @property + def pull( + self, + ) -> Callable[ + [pubsub.PullRequest], Union[pubsub.PullResponse, Awaitable[pubsub.PullResponse]] + ]: + raise NotImplementedError() + + @property + def streaming_pull( + self, + ) -> Callable[ + [pubsub.StreamingPullRequest], + Union[pubsub.StreamingPullResponse, Awaitable[pubsub.StreamingPullResponse]], + ]: + raise NotImplementedError() + + @property + def modify_push_config( + self, + ) -> Callable[ + [pubsub.ModifyPushConfigRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_snapshot( + self, + ) -> Callable[ + [pubsub.GetSnapshotRequest], Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]] + ]: + raise NotImplementedError() + + @property + def list_snapshots( + self, + ) -> Callable[ + [pubsub.ListSnapshotsRequest], + Union[pubsub.ListSnapshotsResponse, Awaitable[pubsub.ListSnapshotsResponse]], + ]: + raise NotImplementedError() + + @property + def create_snapshot( + self, + ) -> Callable[ + [pubsub.CreateSnapshotRequest], + Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def update_snapshot( + self, + ) -> Callable[ + [pubsub.UpdateSnapshotRequest], + Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def delete_snapshot( + self, + ) -> Callable[ + [pubsub.DeleteSnapshotRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def seek( + self, + ) -> Callable[ + [pubsub.SeekRequest], Union[pubsub.SeekResponse, Awaitable[pubsub.SeekResponse]] + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + 
[iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("SubscriberTransport",) diff --git a/google/pubsub_v1/services/subscriber/transports/grpc.py b/google/pubsub_v1/services/subscriber/transports/grpc.py new file mode 100644 index 000000000..705163791 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -0,0 +1,923 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message + +import grpc # type: ignore +import proto # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata 
to a dict of strings
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = response.result()
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response for {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.pubsub.v1.Subscriber",
+                    "rpcName": client_call_details.method,
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class SubscriberGrpcTransport(SubscriberTransport):
+    """gRPC backend transport for Subscriber.
+
+    The service that an application uses to manipulate subscriptions and
+    to consume messages from a subscription via the ``Pull`` method or
+    by establishing a bi-directional stream using the ``StreamingPull``
+    method.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends protocol buffers over the wire using gRPC (which is built on
+    top of HTTP/2); the ``grpcio`` package must be installed.
+    """
+
+    _stubs: Dict[str, Callable]
+
+    def __init__(
+        self,
+        *,
+        host: str = "pubsub.googleapis.com",
+        credentials: Optional[ga_credentials.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None,
+        api_mtls_endpoint: Optional[str] = None,
+        client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        api_audience: Optional[str] = None,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                 The hostname to connect to (default: 'pubsub.googleapis.com').
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if a ``channel`` instance is provided.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if a ``channel`` instance is provided.
+            channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
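Aside: the channel options passed just below lift gRPC's default 4 MiB message-size cap and keep long-lived streams alive. A minimal sketch of the same options on a plain channel; the localhost target is hypothetical (e.g. a local Pub/Sub emulator):

    import grpc

    channel = grpc.insecure_channel(
        "localhost:8085",  # hypothetical target, e.g. a local emulator
        options=[
            ("grpc.max_send_message_length", -1),         # -1 removes the size cap
            ("grpc.max_receive_message_length", -1),
            ("grpc.max_metadata_size", 4 * 1024 * 1024),  # allow large metadata
            ("grpc.keepalive_time_ms", 30000),            # ping the server every 30s
        ],
    )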
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], pubsub.Subscription]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. + + Returns: + Callable[[~.Subscription], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_subscription" not in self._stubs: + self._stubs["create_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSubscription", + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["create_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], pubsub.Subscription]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSubscription", + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], pubsub.Subscription]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription by updating the + fields specified in the update mask. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_subscription" not in self._stubs: + self._stubs["update_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSubscription", + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["update_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[[pubsub.ListSubscriptionsRequest], pubsub.ListSubscriptionsResponse]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + ~.ListSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSubscriptions", + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], empty_pb2.Empty]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSubscription", + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_subscription"] + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], empty_pb2.Empty]: + r"""Return a callable for the modify ack deadline method over gRPC. + + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_ack_deadline" not in self._stubs: + self._stubs["modify_ack_deadline"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["modify_ack_deadline"] + + @property + def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty_pb2.Empty]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge" not in self._stubs: + self._stubs["acknowledge"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Acknowledge", + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["acknowledge"] + + @property + def pull(self) -> Callable[[pubsub.PullRequest], pubsub.PullResponse]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. + + Returns: + Callable[[~.PullRequest], + ~.PullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pull" not in self._stubs: + self._stubs["pull"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Pull", + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs["pull"] + + @property + def streaming_pull( + self, + ) -> Callable[[pubsub.StreamingPullRequest], pubsub.StreamingPullResponse]: + r"""Return a callable for the streaming pull method over gRPC. + + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgments and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Returns: + Callable[[~.StreamingPullRequest], + ~.StreamingPullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_pull" not in self._stubs: + self._stubs["streaming_pull"] = self._logged_channel.stream_stream( + "/google.pubsub.v1.Subscriber/StreamingPull", + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs["streaming_pull"] + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], empty_pb2.Empty]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
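Aside: a hedged sketch of the two conversions the modify_push_config docstring above describes, using the request types this module already imports; the project and subscription names are hypothetical:

    from google.pubsub_v1.types import pubsub

    SUB = "projects/my-project/subscriptions/my-sub"  # hypothetical

    # An empty PushConfig converts a push subscription to pull ...
    to_pull = pubsub.ModifyPushConfigRequest(
        subscription=SUB,
        push_config=pubsub.PushConfig(),
    )

    # ... and setting an endpoint converts it (back) to push.
    to_push = pubsub.ModifyPushConfigRequest(
        subscription=SUB,
        push_config=pubsub.PushConfig(push_endpoint="https://example.com/push"),
    )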
+ if "modify_push_config" not in self._stubs: + self._stubs["modify_push_config"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["modify_push_config"] + + @property + def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. Snapshots are used + in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSnapshot", + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[[pubsub.ListSnapshotsRequest], pubsub.ListSnapshotsResponse]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListSnapshotsRequest], + ~.ListSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSnapshots", + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. 
If the name is not provided in
+        the request, the server will assign a random name for this
+        snapshot on the same project as the subscription, conforming to
+        the [resource name format]
+        (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names).
+        The generated name is populated in the returned Snapshot object.
+        Note that for REST API requests, you must specify a name in the
+        request.
+
+        Returns:
+            Callable[[~.CreateSnapshotRequest],
+                    ~.Snapshot]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_snapshot" not in self._stubs:
+            self._stubs["create_snapshot"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Subscriber/CreateSnapshot",
+                request_serializer=pubsub.CreateSnapshotRequest.serialize,
+                response_deserializer=pubsub.Snapshot.deserialize,
+            )
+        return self._stubs["create_snapshot"]
+
+    @property
+    def update_snapshot(
+        self,
+    ) -> Callable[[pubsub.UpdateSnapshotRequest], pubsub.Snapshot]:
+        r"""Return a callable for the update snapshot method over gRPC.
+
+        Updates an existing snapshot by updating the fields specified in
+        the update mask. Snapshots are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        Returns:
+            Callable[[~.UpdateSnapshotRequest],
+                    ~.Snapshot]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_snapshot" not in self._stubs:
+            self._stubs["update_snapshot"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Subscriber/UpdateSnapshot",
+                request_serializer=pubsub.UpdateSnapshotRequest.serialize,
+                response_deserializer=pubsub.Snapshot.deserialize,
+            )
+        return self._stubs["update_snapshot"]
+
+    @property
+    def delete_snapshot(
+        self,
+    ) -> Callable[[pubsub.DeleteSnapshotRequest], empty_pb2.Empty]:
+        r"""Return a callable for the delete snapshot method over gRPC.
+
+        Removes an existing snapshot. Snapshots are used in [Seek]
+        (https://cloud.google.com/pubsub/docs/replay-overview)
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+        When the snapshot is deleted, all messages retained in the
+        snapshot are immediately dropped. After a snapshot is deleted, a
+        new one may be created with the same name, but the new one has
+        no association with the old snapshot or its subscription, unless
+        the same subscription is specified.
+
+        Returns:
+            Callable[[~.DeleteSnapshotRequest],
+                    ~.Empty]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
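Aside: the snapshot methods above combine into the replay workflow their docstrings reference. A sketch against the high-level client; the resource names are hypothetical and default credentials are assumed:

    from google.pubsub_v1 import SubscriberClient
    from google.pubsub_v1.types import pubsub

    client = SubscriberClient()
    sub = "projects/my-project/subscriptions/my-sub"   # hypothetical

    # Capture the subscription's current acknowledgment state ...
    snap = client.create_snapshot(
        name="projects/my-project/snapshots/my-snap",  # hypothetical
        subscription=sub,
    )

    # ... and later rewind the subscription to that state.
    client.seek(request=pubsub.SeekRequest(subscription=sub, snapshot=snap.name))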
+ if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_snapshot"] + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Returns: + Callable[[~.SeekRequest], + ~.SeekResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "seek" not in self._stubs: + self._stubs["seek"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Seek", + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs["seek"] + + def close(self): + self._logged_channel.close() + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("SubscriberGrpcTransport",) diff --git a/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py new file mode 100644 index 000000000..ad53fe76c --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -0,0 +1,1209 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import inspect
+import json
+import pickle
+import logging as std_logging
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers_async
+from google.api_core import exceptions as core_exceptions
+from google.api_core import retry_async as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.protobuf.json_format import MessageToJson
+import google.protobuf.message
+
+import grpc  # type: ignore
+import proto  # type: ignore
+from grpc.experimental import aio  # type: ignore
+
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.pubsub_v1.types import pubsub
+from .base import SubscriberTransport, DEFAULT_CLIENT_INFO
+from .grpc import SubscriberGrpcTransport
+
+try:
+    from google.api_core import client_logging  # type: ignore
+
+    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
+except ImportError:  # pragma: NO COVER
+    CLIENT_LOGGING_SUPPORTED = False
+
+_LOGGER = std_logging.getLogger(__name__)
+
+
+class _LoggingClientAIOInterceptor(
+    grpc.aio.UnaryUnaryClientInterceptor
+):  # pragma: NO COVER
+    async def intercept_unary_unary(self, continuation, client_call_details, request):
+        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+            std_logging.DEBUG
+        )
+        if logging_enabled:  # pragma: NO COVER
+            request_metadata = client_call_details.metadata
+            if isinstance(request, proto.Message):
+                request_payload = type(request).to_json(request)
+            elif isinstance(request, google.protobuf.message.Message):
+                request_payload = MessageToJson(request)
+            else:
+                request_payload = f"{type(request).__name__}: {pickle.dumps(request)}"
+
+            request_metadata = {
+                key: value.decode("utf-8") if isinstance(value, bytes) else value
+                for key, value in request_metadata
+            }
+            grpc_request = {
+                "payload": request_payload,
+                "requestMethod": "grpc",
+                "metadata": dict(request_metadata),
+            }
+            _LOGGER.debug(
+                f"Sending request for {client_call_details.method}",
+                extra={
+                    "serviceName": "google.pubsub.v1.Subscriber",
+                    "rpcName": str(client_call_details.method),
+                    "request": grpc_request,
+                    "metadata": grpc_request["metadata"],
+                },
+            )
+        response = await continuation(client_call_details, request)
+        if logging_enabled:  # pragma: NO COVER
+            response_metadata = await response.trailing_metadata()
+            # Convert gRPC metadata to a dict of strings
+            metadata = (
+                dict([(k, str(v)) for k, v in response_metadata])
+                if response_metadata
+                else None
+            )
+            result = await response
+            if isinstance(result, proto.Message):
+                response_payload = type(result).to_json(result)
+            elif isinstance(result, google.protobuf.message.Message):
+                response_payload = MessageToJson(result)
+            else:
+                response_payload = f"{type(result).__name__}: {pickle.dumps(result)}"
+            grpc_response = {
+                "payload": response_payload,
+                "metadata": metadata,
+                "status": "OK",
+            }
+            _LOGGER.debug(
+                f"Received response to rpc {client_call_details.method}.",
+                extra={
+                    "serviceName": "google.pubsub.v1.Subscriber",
+                    "rpcName": str(client_call_details.method),
+                    "response": grpc_response,
+                    "metadata": grpc_response["metadata"],
+                },
+            )
+        return response
+
+
+class SubscriberGrpcAsyncIOTransport(SubscriberTransport):
+    """gRPC AsyncIO backend transport for Subscriber.
+ + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if a ``channel`` instance is provided.
+                This argument will be removed in the next major version of this library.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]):
+                A ``Channel`` instance through which to make calls, or a Callable
+                that constructs and returns one. If set to None, ``self.create_channel``
+                is used to create the channel. If a Callable is given, it will be called
+                with the same arguments as used in ``self.create_channel``.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if a ``channel`` instance is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if isinstance(channel, aio.Channel):
+            # Ignore credentials if a channel was passed.
+            credentials = None
+            self._ignore_credentials = True
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
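Aside: a sketch of the deprecated client_cert_source path handled just below. The callback must return (certificate_chain, private_key) as PEM-encoded bytes; the file paths are hypothetical:

    def client_cert_source():
        # Hypothetical paths to a PEM client certificate and its private key.
        with open("client-cert.pem", "rb") as cert_file:
            cert = cert_file.read()
        with open("client-key.pem", "rb") as key_file:
            key = key_file.read()
        return cert, key  # (certificate chain, private key), both PEM bytes

The non-deprecated equivalent is to pass the same callback as client_cert_source_for_mtls, without api_mtls_endpoint.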
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. + + Returns: + Callable[[~.Subscription], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
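Aside: unlike the sync transport, every stub this class returns is awaitable; callers normally reach them through SubscriberAsyncClient. A sketch, with a hypothetical subscription name and default credentials assumed:

    import asyncio
    from google.pubsub_v1 import SubscriberAsyncClient

    async def main():
        client = SubscriberAsyncClient()
        sub = await client.get_subscription(
            subscription="projects/my-project/subscriptions/my-sub"  # hypothetical
        )
        print(sub.ack_deadline_seconds)

    asyncio.run(main())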
+ if "create_subscription" not in self._stubs: + self._stubs["create_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSubscription", + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["create_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSubscription", + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription by updating the + fields specified in the update mask. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_subscription" not in self._stubs: + self._stubs["update_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSubscription", + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["update_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[ + [pubsub.ListSubscriptionsRequest], Awaitable[pubsub.ListSubscriptionsResponse] + ]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + Awaitable[~.ListSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSubscriptions", + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete subscription method over gRPC. 
+ + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSubscription", + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_subscription"] + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the modify ack deadline method over gRPC. + + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_ack_deadline" not in self._stubs: + self._stubs["modify_ack_deadline"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["modify_ack_deadline"] + + @property + def acknowledge( + self, + ) -> Callable[[pubsub.AcknowledgeRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
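+        # Acknowledge returns ``google.protobuf.Empty``. As the docstring above
+        # notes, acking a message whose deadline has expired may still succeed,
+        # and the message may then be redelivered.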
+ if "acknowledge" not in self._stubs: + self._stubs["acknowledge"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Acknowledge", + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["acknowledge"] + + @property + def pull(self) -> Callable[[pubsub.PullRequest], Awaitable[pubsub.PullResponse]]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. + + Returns: + Callable[[~.PullRequest], + Awaitable[~.PullResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pull" not in self._stubs: + self._stubs["pull"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Pull", + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs["pull"] + + @property + def streaming_pull( + self, + ) -> Callable[ + [pubsub.StreamingPullRequest], Awaitable[pubsub.StreamingPullResponse] + ]: + r"""Return a callable for the streaming pull method over gRPC. + + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgments and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Returns: + Callable[[~.StreamingPullRequest], + Awaitable[~.StreamingPullResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_pull" not in self._stubs: + self._stubs["streaming_pull"] = self._logged_channel.stream_stream( + "/google.pubsub.v1.Subscriber/StreamingPull", + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs["streaming_pull"] + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "modify_push_config" not in self._stubs: + self._stubs["modify_push_config"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["modify_push_config"] + + @property + def get_snapshot( + self, + ) -> Callable[[pubsub.GetSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. Snapshots are used + in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSnapshot", + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[ + [pubsub.ListSnapshotsRequest], Awaitable[pubsub.ListSnapshotsResponse] + ]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListSnapshotsRequest], + Awaitable[~.ListSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSnapshots", + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. 
If the name is not provided in
+        the request, the server will assign a random name for this
+        snapshot on the same project as the subscription, conforming to
+        the [resource name format]
+        (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names).
+        The generated name is populated in the returned Snapshot object.
+        Note that for REST API requests, you must specify a name in the
+        request.
+
+        Returns:
+            Callable[[~.CreateSnapshotRequest],
+                    Awaitable[~.Snapshot]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "create_snapshot" not in self._stubs:
+            self._stubs["create_snapshot"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Subscriber/CreateSnapshot",
+                request_serializer=pubsub.CreateSnapshotRequest.serialize,
+                response_deserializer=pubsub.Snapshot.deserialize,
+            )
+        return self._stubs["create_snapshot"]
+
+    @property
+    def update_snapshot(
+        self,
+    ) -> Callable[[pubsub.UpdateSnapshotRequest], Awaitable[pubsub.Snapshot]]:
+        r"""Return a callable for the update snapshot method over gRPC.
+
+        Updates an existing snapshot by updating the fields specified in
+        the update mask. Snapshots are used in
+        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+
+        Returns:
+            Callable[[~.UpdateSnapshotRequest],
+                    Awaitable[~.Snapshot]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "update_snapshot" not in self._stubs:
+            self._stubs["update_snapshot"] = self._logged_channel.unary_unary(
+                "/google.pubsub.v1.Subscriber/UpdateSnapshot",
+                request_serializer=pubsub.UpdateSnapshotRequest.serialize,
+                response_deserializer=pubsub.Snapshot.deserialize,
+            )
+        return self._stubs["update_snapshot"]
+
+    @property
+    def delete_snapshot(
+        self,
+    ) -> Callable[[pubsub.DeleteSnapshotRequest], Awaitable[empty_pb2.Empty]]:
+        r"""Return a callable for the delete snapshot method over gRPC.
+
+        Removes an existing snapshot. Snapshots are used in [Seek]
+        (https://cloud.google.com/pubsub/docs/replay-overview)
+        operations, which allow you to manage message acknowledgments in
+        bulk. That is, you can set the acknowledgment state of messages
+        in an existing subscription to the state captured by a snapshot.
+        When the snapshot is deleted, all messages retained in the
+        snapshot are immediately dropped. After a snapshot is deleted, a
+        new one may be created with the same name, but the new one has
+        no association with the old snapshot or its subscription, unless
+        the same subscription is specified.
+
+        Returns:
+            Callable[[~.DeleteSnapshotRequest],
+                    Awaitable[~.Empty]]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_snapshot"] + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], Awaitable[pubsub.SeekResponse]]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Returns: + Callable[[~.SeekRequest], + Awaitable[~.SeekResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "seek" not in self._stubs: + self._stubs["seek"] = self._logged_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Seek", + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs["seek"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_subscription: self._wrap_method( + self.create_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_subscription: self._wrap_method( + self.get_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_subscription: self._wrap_method( + self.update_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_subscriptions: self._wrap_method( + self.list_subscriptions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_subscription: self._wrap_method( + self.delete_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.modify_ack_deadline: self._wrap_method( + 
self.modify_ack_deadline, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.acknowledge: self._wrap_method( + self.acknowledge, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.pull: self._wrap_method( + self.pull, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.streaming_pull: self._wrap_method( + self.streaming_pull, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=4, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.modify_push_config: self._wrap_method( + self.modify_push_config, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_snapshot: self._wrap_method( + self.get_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_snapshots: self._wrap_method( + self.list_snapshots, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_snapshot: self._wrap_method( + self.create_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_snapshot: self._wrap_method( + self.update_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_snapshot: self._wrap_method( + self.delete_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.seek: self._wrap_method( + self.seek, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( 
+                        core_exceptions.Aborted,
+                        core_exceptions.ServiceUnavailable,
+                        core_exceptions.Unknown,
+                    ),
+                    deadline=60.0,
+                ),
+                default_timeout=60.0,
+                client_info=client_info,
+            ),
+            self.get_iam_policy: self._wrap_method(
+                self.get_iam_policy,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.set_iam_policy: self._wrap_method(
+                self.set_iam_policy,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.test_iam_permissions: self._wrap_method(
+                self.test_iam_permissions,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def _wrap_method(self, func, *args, **kwargs):
+        if self._wrap_with_kind:  # pragma: NO COVER
+            kwargs["kind"] = self.kind
+        return gapic_v1.method_async.wrap_method(func, *args, **kwargs)
+
+    def close(self):
+        return self._logged_channel.close()
+
+    @property
+    def kind(self) -> str:
+        return "grpc_asyncio"
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self._logged_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
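+        # Unlike the Pub/Sub stubs above, the IAM stubs pass the raw protobuf
+        # ``SerializeToString``/``FromString`` pair, because ``iam_policy_pb2``
+        # messages are plain protobuf rather than proto-plus types.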
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SubscriberGrpcAsyncIOTransport",) diff --git a/google/pubsub_v1/services/subscriber/transports/rest.py b/google/pubsub_v1/services/subscriber/transports/rest.py new file mode 100644 index 000000000..50a247cef --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/rest.py @@ -0,0 +1,3562 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import logging +import json # type: ignore + +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import gapic_v1 +import google.protobuf + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore + +from requests import __version__ as requests_version +import dataclasses +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + + +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + + +from .rest_base import _BaseSubscriberRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class SubscriberRestInterceptor: + """Interceptor for Subscriber. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
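+
+    Each RPC ``X`` on this service has a ``pre_X`` hook that receives the
+    outgoing request and metadata; RPCs that return a response also have a
+    ``post_X`` hook that receives that response before it reaches user code.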
+    Example use cases include:
+
+    * Logging
+    * Verifying requests according to service or custom semantics
+    * Stripping extraneous information from responses
+
+    These use cases and more can be enabled by injecting an
+    instance of a custom subclass when constructing the SubscriberRestTransport.
+
+    .. code-block:: python
+
+        class MyCustomSubscriberInterceptor(SubscriberRestInterceptor):
+            def pre_acknowledge(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_create_snapshot(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_snapshot(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_create_subscription(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_create_subscription(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_delete_snapshot(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_delete_subscription(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_get_snapshot(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_snapshot(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_get_subscription(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_get_subscription(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_snapshots(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_snapshots(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_list_subscriptions(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_list_subscriptions(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_modify_ack_deadline(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_modify_push_config(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def pre_pull(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_pull(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_seek(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_seek(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_snapshot(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_snapshot(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+            def pre_update_subscription(self, request, metadata):
+                logging.info(f"Received request: {request}")
+                return request, metadata
+
+            def post_update_subscription(self, response):
+                logging.info(f"Received response: {response}")
+                return response
+
+        transport = SubscriberRestTransport(interceptor=MyCustomSubscriberInterceptor())
+        client = SubscriberClient(transport=transport)
+
+    """
+
+    def
pre_acknowledge( + self, + request: pubsub.AcknowledgeRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.AcknowledgeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for acknowledge + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def pre_create_snapshot( + self, + request: pubsub.CreateSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.CreateSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_create_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: + """Post-rpc interceptor for create_snapshot + + DEPRECATED. Please use the `post_create_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_create_snapshot` interceptor runs + before the `post_create_snapshot_with_metadata` interceptor. + """ + return response + + def post_create_snapshot_with_metadata( + self, + response: pubsub.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_create_snapshot_with_metadata` + interceptor in new development instead of the `post_create_snapshot` interceptor. + When both interceptors are used, this `post_create_snapshot_with_metadata` interceptor runs after the + `post_create_snapshot` interceptor. The (possibly modified) response returned by + `post_create_snapshot` will be passed to + `post_create_snapshot_with_metadata`. + """ + return response, metadata + + def pre_create_subscription( + self, + request: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_create_subscription( + self, response: pubsub.Subscription + ) -> pubsub.Subscription: + """Post-rpc interceptor for create_subscription + + DEPRECATED. Please use the `post_create_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_create_subscription` interceptor runs + before the `post_create_subscription_with_metadata` interceptor. + """ + return response + + def post_create_subscription_with_metadata( + self, + response: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. 
+ + We recommend only using this `post_create_subscription_with_metadata` + interceptor in new development instead of the `post_create_subscription` interceptor. + When both interceptors are used, this `post_create_subscription_with_metadata` interceptor runs after the + `post_create_subscription` interceptor. The (possibly modified) response returned by + `post_create_subscription` will be passed to + `post_create_subscription_with_metadata`. + """ + return response, metadata + + def pre_delete_snapshot( + self, + request: pubsub.DeleteSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.DeleteSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def pre_delete_subscription( + self, + request: pubsub.DeleteSubscriptionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.DeleteSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def pre_get_snapshot( + self, + request: pubsub.GetSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.GetSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_get_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: + """Post-rpc interceptor for get_snapshot + + DEPRECATED. Please use the `post_get_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_get_snapshot` interceptor runs + before the `post_get_snapshot_with_metadata` interceptor. + """ + return response + + def post_get_snapshot_with_metadata( + self, + response: pubsub.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_get_snapshot_with_metadata` + interceptor in new development instead of the `post_get_snapshot` interceptor. + When both interceptors are used, this `post_get_snapshot_with_metadata` interceptor runs after the + `post_get_snapshot` interceptor. The (possibly modified) response returned by + `post_get_snapshot` will be passed to + `post_get_snapshot_with_metadata`. + """ + return response, metadata + + def pre_get_subscription( + self, + request: pubsub.GetSubscriptionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.GetSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. 
+ """ + return request, metadata + + def post_get_subscription( + self, response: pubsub.Subscription + ) -> pubsub.Subscription: + """Post-rpc interceptor for get_subscription + + DEPRECATED. Please use the `post_get_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_get_subscription` interceptor runs + before the `post_get_subscription_with_metadata` interceptor. + """ + return response + + def post_get_subscription_with_metadata( + self, + response: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_get_subscription_with_metadata` + interceptor in new development instead of the `post_get_subscription` interceptor. + When both interceptors are used, this `post_get_subscription_with_metadata` interceptor runs after the + `post_get_subscription` interceptor. The (possibly modified) response returned by + `post_get_subscription` will be passed to + `post_get_subscription_with_metadata`. + """ + return response, metadata + + def pre_list_snapshots( + self, + request: pubsub.ListSnapshotsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListSnapshotsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_snapshots + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_list_snapshots( + self, response: pubsub.ListSnapshotsResponse + ) -> pubsub.ListSnapshotsResponse: + """Post-rpc interceptor for list_snapshots + + DEPRECATED. Please use the `post_list_snapshots_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_list_snapshots` interceptor runs + before the `post_list_snapshots_with_metadata` interceptor. + """ + return response + + def post_list_snapshots_with_metadata( + self, + response: pubsub.ListSnapshotsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListSnapshotsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_snapshots + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_list_snapshots_with_metadata` + interceptor in new development instead of the `post_list_snapshots` interceptor. + When both interceptors are used, this `post_list_snapshots_with_metadata` interceptor runs after the + `post_list_snapshots` interceptor. The (possibly modified) response returned by + `post_list_snapshots` will be passed to + `post_list_snapshots_with_metadata`. 
+ """ + return response, metadata + + def pre_list_subscriptions( + self, + request: pubsub.ListSubscriptionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListSubscriptionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_subscriptions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_list_subscriptions( + self, response: pubsub.ListSubscriptionsResponse + ) -> pubsub.ListSubscriptionsResponse: + """Post-rpc interceptor for list_subscriptions + + DEPRECATED. Please use the `post_list_subscriptions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_list_subscriptions` interceptor runs + before the `post_list_subscriptions_with_metadata` interceptor. + """ + return response + + def post_list_subscriptions_with_metadata( + self, + response: pubsub.ListSubscriptionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListSubscriptionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_subscriptions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_list_subscriptions_with_metadata` + interceptor in new development instead of the `post_list_subscriptions` interceptor. + When both interceptors are used, this `post_list_subscriptions_with_metadata` interceptor runs after the + `post_list_subscriptions` interceptor. The (possibly modified) response returned by + `post_list_subscriptions` will be passed to + `post_list_subscriptions_with_metadata`. + """ + return response, metadata + + def pre_modify_ack_deadline( + self, + request: pubsub.ModifyAckDeadlineRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ModifyAckDeadlineRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for modify_ack_deadline + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def pre_modify_push_config( + self, + request: pubsub.ModifyPushConfigRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ModifyPushConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for modify_push_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def pre_pull( + self, + request: pubsub.PullRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PullRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for pull + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_pull(self, response: pubsub.PullResponse) -> pubsub.PullResponse: + """Post-rpc interceptor for pull + + DEPRECATED. Please use the `post_pull_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. 
This `post_pull` interceptor runs + before the `post_pull_with_metadata` interceptor. + """ + return response + + def post_pull_with_metadata( + self, + response: pubsub.PullResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PullResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for pull + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_pull_with_metadata` + interceptor in new development instead of the `post_pull` interceptor. + When both interceptors are used, this `post_pull_with_metadata` interceptor runs after the + `post_pull` interceptor. The (possibly modified) response returned by + `post_pull` will be passed to + `post_pull_with_metadata`. + """ + return response, metadata + + def pre_seek( + self, + request: pubsub.SeekRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.SeekRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for seek + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_seek(self, response: pubsub.SeekResponse) -> pubsub.SeekResponse: + """Post-rpc interceptor for seek + + DEPRECATED. Please use the `post_seek_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_seek` interceptor runs + before the `post_seek_with_metadata` interceptor. + """ + return response + + def post_seek_with_metadata( + self, + response: pubsub.SeekResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.SeekResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for seek + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_seek_with_metadata` + interceptor in new development instead of the `post_seek` interceptor. + When both interceptors are used, this `post_seek_with_metadata` interceptor runs after the + `post_seek` interceptor. The (possibly modified) response returned by + `post_seek` will be passed to + `post_seek_with_metadata`. + """ + return response, metadata + + def pre_update_snapshot( + self, + request: pubsub.UpdateSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.UpdateSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for update_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_update_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: + """Post-rpc interceptor for update_snapshot + + DEPRECATED. Please use the `post_update_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_update_snapshot` interceptor runs + before the `post_update_snapshot_with_metadata` interceptor. 
+ """ + return response + + def post_update_snapshot_with_metadata( + self, + response: pubsub.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_update_snapshot_with_metadata` + interceptor in new development instead of the `post_update_snapshot` interceptor. + When both interceptors are used, this `post_update_snapshot_with_metadata` interceptor runs after the + `post_update_snapshot` interceptor. The (possibly modified) response returned by + `post_update_snapshot` will be passed to + `post_update_snapshot_with_metadata`. + """ + return response, metadata + + def pre_update_subscription( + self, + request: pubsub.UpdateSubscriptionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.UpdateSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_update_subscription( + self, response: pubsub.Subscription + ) -> pubsub.Subscription: + """Post-rpc interceptor for update_subscription + + DEPRECATED. Please use the `post_update_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. This `post_update_subscription` interceptor runs + before the `post_update_subscription_with_metadata` interceptor. + """ + return response + + def post_update_subscription_with_metadata( + self, + response: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_update_subscription_with_metadata` + interceptor in new development instead of the `post_update_subscription` interceptor. + When both interceptors are used, this `post_update_subscription_with_metadata` interceptor runs after the + `post_update_subscription` interceptor. The (possibly modified) response returned by + `post_update_subscription` will be passed to + `post_update_subscription_with_metadata`. + """ + return response, metadata + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SubscriberRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SubscriberRestInterceptor + + +class SubscriberRestTransport(_BaseSubscriberRestTransport): + """REST backend synchronous transport for Subscriber. + + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SubscriberRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or SubscriberRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Acknowledge( + _BaseSubscriberRestTransport._BaseAcknowledge, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.Acknowledge") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.AcknowledgeRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the acknowledge method over HTTP. + + Args: + request (~.pubsub.AcknowledgeRequest): + The request object. Request for the Acknowledge method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
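+
+            For illustration, a ``-bin`` metadata entry carries ``bytes``
+            while ordinary entries carry ``str`` (the key names here are
+            hypothetical)::
+
+                metadata = (
+                    ("x-custom-header", "value"),
+                    ("x-custom-trace-bin", b"\x01\x02"),
+                )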
+ """ + + http_options = ( + _BaseSubscriberRestTransport._BaseAcknowledge._get_http_options() + ) + + request, metadata = self._interceptor.pre_acknowledge(request, metadata) + transcoded_request = ( + _BaseSubscriberRestTransport._BaseAcknowledge._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseSubscriberRestTransport._BaseAcknowledge._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseAcknowledge._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.Acknowledge", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Acknowledge", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._Acknowledge._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _CreateSnapshot( + _BaseSubscriberRestTransport._BaseCreateSnapshot, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.CreateSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.CreateSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Snapshot: + r"""Call the create snapshot method over HTTP. + + Args: + request (~.pubsub.CreateSnapshotRequest): + The request object. Request for the ``CreateSnapshot`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. 
+ + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_snapshot(request, metadata) + transcoded_request = _BaseSubscriberRestTransport._BaseCreateSnapshot._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.CreateSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "CreateSnapshot", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._CreateSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Snapshot() + pb_resp = pubsub.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_snapshot_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Snapshot.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.create_snapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "CreateSnapshot", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateSubscription( + _BaseSubscriberRestTransport._BaseCreateSubscription, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.CreateSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.Subscription, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Call the create subscription method over HTTP. + + Args: + request (~.pubsub.Subscription): + The request object. A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Subscription: + A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. + + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseCreateSubscription._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_subscription( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseCreateSubscription._get_transcoded_request( + http_options, request + ) + + body = _BaseSubscriberRestTransport._BaseCreateSubscription._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseCreateSubscription._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.CreateSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "CreateSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._CreateSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
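+            # ``from_http_response`` maps the HTTP status to the matching
+            # ``GoogleAPICallError`` subclass (for example, 404 -> ``NotFound``,
+            # 403 -> ``PermissionDenied``), so callers never see a raw
+            # ``requests`` error here.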
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = pubsub.Subscription()
+            pb_resp = pubsub.Subscription.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+
+            resp = self._interceptor.post_create_subscription(resp)
+            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
+            resp, _ = self._interceptor.post_create_subscription_with_metadata(
+                resp, response_metadata
+            )
+            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+                logging.DEBUG
+            ):  # pragma: NO COVER
+                try:
+                    # Serialize the parsed message (``resp``), not the raw HTTP
+                    # response object, so the logged payload is meaningful.
+                    response_payload = pubsub.Subscription.to_json(resp)
+                except Exception:
+                    response_payload = None
+                http_response = {
+                    "payload": response_payload,
+                    "headers": dict(response.headers),
+                    "status": response.status_code,
+                }
+                _LOGGER.debug(
+                    "Received response for google.pubsub_v1.SubscriberClient.create_subscription",
+                    extra={
+                        "serviceName": "google.pubsub.v1.Subscriber",
+                        "rpcName": "CreateSubscription",
+                        "metadata": http_response["headers"],
+                        "httpResponse": http_response,
+                    },
+                )
+            return resp
+
+    class _DeleteSnapshot(
+        _BaseSubscriberRestTransport._BaseDeleteSnapshot, SubscriberRestStub
+    ):
+        def __hash__(self):
+            return hash("SubscriberRestTransport.DeleteSnapshot")
+
+        @staticmethod
+        def _get_response(
+            host,
+            metadata,
+            query_params,
+            session,
+            timeout,
+            transcoded_request,
+            body=None,
+        ):
+            uri = transcoded_request["uri"]
+            method = transcoded_request["method"]
+            headers = dict(metadata)
+            headers["Content-Type"] = "application/json"
+            response = getattr(session, method)(
+                "{host}{uri}".format(host=host, uri=uri),
+                timeout=timeout,
+                headers=headers,
+                params=rest_helpers.flatten_query_params(query_params, strict=True),
+            )
+            return response
+
+        def __call__(
+            self,
+            request: pubsub.DeleteSnapshotRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+        ):
+            r"""Call the delete snapshot method over HTTP.
+
+            Args:
+                request (~.pubsub.DeleteSnapshotRequest):
+                    The request object. Request for the ``DeleteSnapshot`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+                    sent along with the request as metadata. Normally, each value must be of type `str`,
+                    but for metadata keys ending with the suffix `-bin`, the corresponding values must
+                    be of type `bytes`.
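+
+            On success this method returns ``None``: the ``DeleteSnapshot``
+            RPC responds with ``google.protobuf.Empty``, so there is no body
+            to parse.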
+ """ + + http_options = ( + _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) + transcoded_request = _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.DeleteSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "DeleteSnapshot", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._DeleteSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSubscription( + _BaseSubscriberRestTransport._BaseDeleteSubscription, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.DeleteSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.DeleteSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete subscription method over HTTP. + + Args: + request (~.pubsub.DeleteSubscriptionRequest): + The request object. Request for the DeleteSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseSubscriberRestTransport._BaseDeleteSubscription._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_subscription( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseDeleteSubscription._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseDeleteSubscription._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.DeleteSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "DeleteSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._DeleteSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetSnapshot( + _BaseSubscriberRestTransport._BaseGetSnapshot, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.GetSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.GetSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Snapshot: + r"""Call the get snapshot method over HTTP. + + Args: + request (~.pubsub.GetSnapshotRequest): + The request object. Request for the GetSnapshot method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. 
+ + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_snapshot(request, metadata) + transcoded_request = ( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.GetSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSnapshot", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._GetSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Snapshot() + pb_resp = pubsub.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_snapshot_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Snapshot.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.get_snapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSnapshot", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetSubscription( + _BaseSubscriberRestTransport._BaseGetSubscription, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.GetSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.GetSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Call the get subscription method over HTTP. 
+ + Args: + request (~.pubsub.GetSubscriptionRequest): + The request object. Request for the GetSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Subscription: + A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. + + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseGetSubscription._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_subscription( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseGetSubscription._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseGetSubscription._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.GetSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._GetSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
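+            # The order below mirrors the interceptor contract documented above:
+            # ``post_get_subscription`` runs first, then
+            # ``post_get_subscription_with_metadata`` receives its (possibly
+            # modified) result together with the response headers.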
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Subscription() + pb_resp = pubsub.Subscription.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_subscription_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Subscription.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.get_subscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSubscription", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSnapshots( + _BaseSubscriberRestTransport._BaseListSnapshots, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.ListSnapshots") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.ListSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.ListSnapshotsResponse: + r"""Call the list snapshots method over HTTP. + + Args: + request (~.pubsub.ListSnapshotsRequest): + The request object. Request for the ``ListSnapshots`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.ListSnapshotsResponse: + Response for the ``ListSnapshots`` method. 
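+
+                    Note that the transport returns a single page; the
+                    response carries a ``next_page_token`` which the
+                    client-level pagers use to fetch subsequent pages.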
+ """ + + http_options = ( + _BaseSubscriberRestTransport._BaseListSnapshots._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_snapshots(request, metadata) + transcoded_request = ( + _BaseSubscriberRestTransport._BaseListSnapshots._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseListSnapshots._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ListSnapshots", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSnapshots", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._ListSnapshots._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListSnapshotsResponse() + pb_resp = pubsub.ListSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_snapshots(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_snapshots_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListSnapshotsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.list_snapshots", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSnapshots", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListSubscriptions( + _BaseSubscriberRestTransport._BaseListSubscriptions, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.ListSubscriptions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: pubsub.ListSubscriptionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.ListSubscriptionsResponse: + r"""Call the 
list subscriptions method over HTTP. + + Args: + request (~.pubsub.ListSubscriptionsRequest): + The request object. Request for the ``ListSubscriptions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.ListSubscriptionsResponse: + Response for the ``ListSubscriptions`` method. + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseListSubscriptions._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_subscriptions( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseListSubscriptions._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseListSubscriptions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ListSubscriptions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSubscriptions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._ListSubscriptions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
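+            # Error statuses are surfaced as ``google.api_core`` exceptions
+            # rather than ``requests.HTTPError``, keeping the REST and gRPC
+            # transports behaviorally consistent for callers.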
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListSubscriptionsResponse() + pb_resp = pubsub.ListSubscriptionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_subscriptions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_subscriptions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListSubscriptionsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.list_subscriptions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSubscriptions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ModifyAckDeadline( + _BaseSubscriberRestTransport._BaseModifyAckDeadline, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.ModifyAckDeadline") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.ModifyAckDeadlineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the modify ack deadline method over HTTP. + + Args: + request (~.pubsub.ModifyAckDeadlineRequest): + The request object. Request for the ModifyAckDeadline + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_http_options() + ) + + request, metadata = self._interceptor.pre_modify_ack_deadline( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_transcoded_request( + http_options, request + ) + + body = _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ModifyAckDeadline", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ModifyAckDeadline", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._ModifyAckDeadline._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ModifyPushConfig( + _BaseSubscriberRestTransport._BaseModifyPushConfig, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.ModifyPushConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.ModifyPushConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the modify push config method over HTTP. + + Args: + request (~.pubsub.ModifyPushConfigRequest): + The request object. Request for the ModifyPushConfig + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseSubscriberRestTransport._BaseModifyPushConfig._get_http_options() + ) + + request, metadata = self._interceptor.pre_modify_push_config( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseModifyPushConfig._get_transcoded_request( + http_options, request + ) + + body = _BaseSubscriberRestTransport._BaseModifyPushConfig._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseModifyPushConfig._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ModifyPushConfig", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ModifyPushConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._ModifyPushConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _Pull(_BaseSubscriberRestTransport._BasePull, SubscriberRestStub): + def __hash__(self): + return hash("SubscriberRestTransport.Pull") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.PullRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.PullResponse: + r"""Call the pull method over HTTP. + + Args: + request (~.pubsub.PullRequest): + The request object. Request for the ``Pull`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.PullResponse: + Response for the ``Pull`` method. 
+ """ + + http_options = _BaseSubscriberRestTransport._BasePull._get_http_options() + + request, metadata = self._interceptor.pre_pull(request, metadata) + transcoded_request = ( + _BaseSubscriberRestTransport._BasePull._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseSubscriberRestTransport._BasePull._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BasePull._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.Pull", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Pull", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._Pull._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.PullResponse() + pb_resp = pubsub.PullResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_pull(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_pull_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.PullResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.pull", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Pull", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Seek(_BaseSubscriberRestTransport._BaseSeek, SubscriberRestStub): + def __hash__(self): + return hash("SubscriberRestTransport.Seek") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.SeekRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.SeekResponse: + r"""Call the seek method over HTTP. + + Args: + request (~.pubsub.SeekRequest): + The request object. 
Request for the ``Seek`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.SeekResponse: + Response for the ``Seek`` method (this response is + empty). + + """ + + http_options = _BaseSubscriberRestTransport._BaseSeek._get_http_options() + + request, metadata = self._interceptor.pre_seek(request, metadata) + transcoded_request = ( + _BaseSubscriberRestTransport._BaseSeek._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseSubscriberRestTransport._BaseSeek._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseSeek._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.Seek", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Seek", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._Seek._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
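+            # A successful ``Seek`` parses into an empty ``SeekResponse``
+            # below; the post-RPC interceptors still run, so callers can
+            # observe the response metadata.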
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.SeekResponse() + pb_resp = pubsub.SeekResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_seek(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_seek_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.SeekResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.seek", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Seek", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _StreamingPull( + _BaseSubscriberRestTransport._BaseStreamingPull, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.StreamingPull") + + def __call__( + self, + request: pubsub.StreamingPullRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method StreamingPull is not available over REST transport" + ) + + class _UpdateSnapshot( + _BaseSubscriberRestTransport._BaseUpdateSnapshot, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.UpdateSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.UpdateSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Snapshot: + r"""Call the update snapshot method over HTTP. + + Args: + request (~.pubsub.UpdateSnapshotRequest): + The request object. Request for the UpdateSnapshot + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. 
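+                    (The ``Seek`` link above points to
+                    https://cloud.google.com/pubsub/docs/replay-overview.)
+
+            For illustration, the request carries the snapshot and a field
+            mask naming the fields to change (the values here are
+            placeholders)::
+
+                request = pubsub.UpdateSnapshotRequest(
+                    snapshot={"name": "projects/my-project/snapshots/my-snapshot"},
+                    update_mask={"paths": ["expire_time"]},
+                )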
+ + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_snapshot(request, metadata) + transcoded_request = _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.UpdateSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "UpdateSnapshot", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._UpdateSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Snapshot() + pb_resp = pubsub.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_snapshot_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Snapshot.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.update_snapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "UpdateSnapshot", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateSubscription( + _BaseSubscriberRestTransport._BaseUpdateSubscription, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.UpdateSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: pubsub.UpdateSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + 
metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pubsub.Subscription: + r"""Call the update subscription method over HTTP. + + Args: + request (~.pubsub.UpdateSubscriptionRequest): + The request object. Request for the UpdateSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.pubsub.Subscription: + A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. + + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseUpdateSubscription._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_subscription( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseUpdateSubscription._get_transcoded_request( + http_options, request + ) + + body = _BaseSubscriberRestTransport._BaseUpdateSubscription._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseUpdateSubscription._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.UpdateSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "UpdateSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._UpdateSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Subscription() + pb_resp = pubsub.Subscription.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_subscription_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Subscription.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.update_subscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "UpdateSubscription", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Acknowledge(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], pubsub.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], pubsub.Subscription]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_snapshot( + self, + ) -> Callable[[pubsub.DeleteSnapshotRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], pubsub.Subscription]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_snapshots( + self, + ) -> Callable[[pubsub.ListSnapshotsRequest], pubsub.ListSnapshotsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_subscriptions( + self, + ) -> Callable[[pubsub.ListSubscriptionsRequest], pubsub.ListSubscriptionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSubscriptions(self._session, self._host, self._interceptor) # type: ignore + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModifyAckDeadline(self._session, self._host, self._interceptor) # type: ignore + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModifyPushConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def pull(self) -> Callable[[pubsub.PullRequest], pubsub.PullResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Pull(self._session, self._host, self._interceptor) # type: ignore + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Seek(self._session, self._host, self._interceptor) # type: ignore + + @property + def streaming_pull( + self, + ) -> Callable[[pubsub.StreamingPullRequest], pubsub.StreamingPullResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamingPull(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_snapshot( + self, + ) -> Callable[[pubsub.UpdateSnapshotRequest], pubsub.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], pubsub.Subscription]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy( + _BaseSubscriberRestTransport._BaseGetIamPolicy, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = ( + _BaseSubscriberRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy( + _BaseSubscriberRestTransport._BaseSetIamPolicy, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> policy_pb2.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = policy_pb2.Policy() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions( + _BaseSubscriberRestTransport._BaseTestIamPermissions, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
iam_policy_pb2.TestIamPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. + """ + + http_options = ( + _BaseSubscriberRestTransport._BaseTestIamPermissions._get_http_options() + ) + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseSubscriberRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseSubscriberRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseSubscriberRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SubscriberRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SubscriberRestTransport",) diff --git a/google/pubsub_v1/services/subscriber/transports/rest_base.py b/google/pubsub_v1/services/subscriber/transports/rest_base.py new file mode 100644 index 000000000..f4fb07656 --- /dev/null +++ b/google/pubsub_v1/services/subscriber/transports/rest_base.py @@ -0,0 +1,1024 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + + +class _BaseSubscriberRestTransport(SubscriberTransport): + """Base REST backend transport for Subscriber. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self-signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseAcknowledge: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:acknowledge", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.AcknowledgeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseAcknowledge._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/snapshots/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.CreateSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/subscriptions/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.Subscription.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseCreateSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{snapshot=projects/*/snapshots/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.DeleteSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: 
List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{subscription=projects/*/subscriptions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.DeleteSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseDeleteSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{snapshot=projects/*/snapshots/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{subscription=projects/*/subscriptions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.GetSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseGetSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSnapshots: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + 
def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/snapshots", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseListSnapshots._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSubscriptions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/subscriptions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListSubscriptionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseListSubscriptions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseModifyAckDeadline: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ModifyAckDeadlineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseModifyPushConfig: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ModifyPushConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseModifyPushConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePull: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:pull", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.PullRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BasePull._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSeek: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:seek", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.SeekRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return 
transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseSeek._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseStreamingPull: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BaseUpdateSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{snapshot.name=projects/*/snapshots/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.UpdateSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{subscription.name=projects/*/subscriptions/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.UpdateSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseUpdateSubscription._get_unset_required_fields( + query_params + ) + ) + + 
query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseSubscriberRestTransport",) 
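Note on the ``_Base*`` helpers above: each nested class bundles the ``google.api.http`` mapping for one RPC (``_get_http_options``), binds the request message into the URI template (``_get_transcoded_request``, via ``path_template.transcode``), and JSON-encodes whatever fields remain as the body and query string (``_get_request_body_json``, ``_get_query_params_json``). A minimal sketch of that flow, using ``DeleteSnapshot`` as the example; the project and snapshot names are hypothetical, while ``path_template.transcode`` and the generated request type are the same APIs this file itself calls:

from google.api_core import path_template
from google.pubsub_v1.types import pubsub

# Hypothetical resource name, for illustration only.
request = pubsub.DeleteSnapshotRequest(
    snapshot="projects/my-project/snapshots/my-snapshot",
)

# The same mapping that _BaseDeleteSnapshot._get_http_options() declares.
http_options = [
    {"method": "delete", "uri": "/v1/{snapshot=projects/*/snapshots/*}"},
]

# transcode() matches the `snapshot` field against the URI template and
# returns the HTTP verb, the expanded URI, and any unmatched fields as
# query parameters.
transcoded = path_template.transcode(
    http_options, pubsub.DeleteSnapshotRequest.pb(request)
)
print(transcoded["method"])  # delete
print(transcoded["uri"])     # /v1/projects/my-project/snapshots/my-snapshot

The trailing ``query_params["$alt"] = "json;enum-encoding=int"`` in each helper asks the server for JSON responses with enums encoded as integers, matching the ``use_integers_for_enums=True`` serialization used on the request side.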
diff --git a/google/pubsub_v1/types/__init__.py b/google/pubsub_v1/types/__init__.py new file mode 100644 index 000000000..85c6b901b --- /dev/null +++ b/google/pubsub_v1/types/__init__.py @@ -0,0 +1,169 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Union + +from .pubsub import ( + AcknowledgeRequest, + BigQueryConfig, + CloudStorageConfig, + CreateSnapshotRequest, + DeadLetterPolicy, + DeleteSnapshotRequest, + DeleteSubscriptionRequest, + DeleteTopicRequest, + DetachSubscriptionRequest, + DetachSubscriptionResponse, + ExpirationPolicy, + GetSnapshotRequest, + GetSubscriptionRequest, + GetTopicRequest, + IngestionDataSourceSettings, + IngestionFailureEvent, + JavaScriptUDF, + ListSnapshotsRequest, + ListSnapshotsResponse, + ListSubscriptionsRequest, + ListSubscriptionsResponse, + ListTopicSnapshotsRequest, + ListTopicSnapshotsResponse, + ListTopicsRequest, + ListTopicsResponse, + ListTopicSubscriptionsRequest, + ListTopicSubscriptionsResponse, + MessageStoragePolicy, + MessageTransform, + ModifyAckDeadlineRequest, + ModifyPushConfigRequest, + PlatformLogsSettings, + PublishRequest, + PublishResponse, + PubsubMessage, + PullRequest, + PullResponse, + PushConfig, + ReceivedMessage, + RetryPolicy, + SchemaSettings, + SeekRequest, + SeekResponse, + Snapshot, + StreamingPullRequest, + StreamingPullResponse, + Subscription, + Topic, + UpdateSnapshotRequest, + UpdateSubscriptionRequest, + UpdateTopicRequest, +) +from .schema import ( + CommitSchemaRequest, + CreateSchemaRequest, + DeleteSchemaRequest, + DeleteSchemaRevisionRequest, + GetSchemaRequest, + ListSchemaRevisionsRequest, + ListSchemaRevisionsResponse, + ListSchemasRequest, + ListSchemasResponse, + RollbackSchemaRequest, + Schema, + ValidateMessageRequest, + ValidateMessageResponse, + ValidateSchemaRequest, + ValidateSchemaResponse, + Encoding, + SchemaView, +) + +TimeoutType = Union[ + int, + float, + "google.api_core.timeout.ConstantTimeout", + "google.api_core.timeout.ExponentialTimeout", +] +"""The type of the timeout parameter of publisher client methods.""" + +__all__ = ( + "TimeoutType", + "AcknowledgeRequest", + "BigQueryConfig", + "CloudStorageConfig", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "ExpirationPolicy", + "GetSnapshotRequest", + "GetSubscriptionRequest", + "GetTopicRequest", + "IngestionDataSourceSettings", + "IngestionFailureEvent", + "JavaScriptUDF", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "MessageStoragePolicy", + "MessageTransform", + "ModifyAckDeadlineRequest", + "ModifyPushConfigRequest", + "PlatformLogsSettings", + "PublishRequest", 
+ "PublishResponse", + "PubsubMessage", + "PullRequest", + "PullResponse", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "SchemaSettings", + "SeekRequest", + "SeekResponse", + "Snapshot", + "StreamingPullRequest", + "StreamingPullResponse", + "Subscription", + "Topic", + "UpdateSnapshotRequest", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", + "CommitSchemaRequest", + "CreateSchemaRequest", + "DeleteSchemaRequest", + "DeleteSchemaRevisionRequest", + "GetSchemaRequest", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", + "ListSchemasRequest", + "ListSchemasResponse", + "RollbackSchemaRequest", + "Schema", + "ValidateMessageRequest", + "ValidateMessageResponse", + "ValidateSchemaRequest", + "ValidateSchemaResponse", + "Encoding", + "SchemaView", +) diff --git a/google/pubsub_v1/types/pubsub.py b/google/pubsub_v1/types/pubsub.py new file mode 100644 index 000000000..26c13fb18 --- /dev/null +++ b/google/pubsub_v1/types/pubsub.py @@ -0,0 +1,3602 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.types import schema as gp_schema + + +__protobuf__ = proto.module( + package="google.pubsub.v1", + manifest={ + "MessageStoragePolicy", + "SchemaSettings", + "IngestionDataSourceSettings", + "PlatformLogsSettings", + "IngestionFailureEvent", + "JavaScriptUDF", + "MessageTransform", + "Topic", + "PubsubMessage", + "GetTopicRequest", + "UpdateTopicRequest", + "PublishRequest", + "PublishResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "Subscription", + "RetryPolicy", + "DeadLetterPolicy", + "ExpirationPolicy", + "PushConfig", + "BigQueryConfig", + "CloudStorageConfig", + "ReceivedMessage", + "GetSubscriptionRequest", + "UpdateSubscriptionRequest", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "DeleteSubscriptionRequest", + "ModifyPushConfigRequest", + "PullRequest", + "PullResponse", + "ModifyAckDeadlineRequest", + "AcknowledgeRequest", + "StreamingPullRequest", + "StreamingPullResponse", + "CreateSnapshotRequest", + "UpdateSnapshotRequest", + "Snapshot", + "GetSnapshotRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "DeleteSnapshotRequest", + "SeekRequest", + "SeekResponse", + }, +) + + +class MessageStoragePolicy(proto.Message): + r"""A policy constraining the storage of messages published to + the topic. + + Attributes: + allowed_persistence_regions (MutableSequence[str]): + Optional. 
A list of IDs of Google Cloud + regions where messages that are published to the + topic may be persisted in storage. Messages + published by publishers running in non-allowed + Google Cloud regions (or running outside of + Google Cloud altogether) are routed for storage + in one of the allowed regions. An empty list + means that no regions are allowed, and is not a + valid configuration. + enforce_in_transit (bool): + Optional. If true, ``allowed_persistence_regions`` is also + used to enforce in-transit guarantees for messages. That is, + Pub/Sub will fail Publish operations on this topic and + subscribe operations on any subscription attached to this + topic in any region that is not in + ``allowed_persistence_regions``. + """ + + allowed_persistence_regions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + enforce_in_transit: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class SchemaSettings(proto.Message): + r"""Settings for validating messages published against a schema. + + Attributes: + schema (str): + Required. The name of the schema that messages published + should be validated against. Format is + ``projects/{project}/schemas/{schema}``. The value of this + field will be ``_deleted-schema_`` if the schema has been + deleted. + encoding (google.pubsub_v1.types.Encoding): + Optional. The encoding of messages validated against + ``schema``. + first_revision_id (str): + Optional. The minimum (inclusive) revision allowed for + validating messages. If empty or not present, allow any + revision to be validated against last_revision or any + revision created before. + last_revision_id (str): + Optional. The maximum (inclusive) revision allowed for + validating messages. If empty or not present, allow any + revision to be validated against first_revision or any + revision created after. + """ + + schema: str = proto.Field( + proto.STRING, + number=1, + ) + encoding: gp_schema.Encoding = proto.Field( + proto.ENUM, + number=2, + enum=gp_schema.Encoding, + ) + first_revision_id: str = proto.Field( + proto.STRING, + number=3, + ) + last_revision_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class IngestionDataSourceSettings(proto.Message): + r"""Settings for an ingestion data source on a topic. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + aws_kinesis (google.pubsub_v1.types.IngestionDataSourceSettings.AwsKinesis): + Optional. Amazon Kinesis Data Streams. + + This field is a member of `oneof`_ ``source``. + cloud_storage (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage): + Optional. Cloud Storage. + + This field is a member of `oneof`_ ``source``. + azure_event_hubs (google.pubsub_v1.types.IngestionDataSourceSettings.AzureEventHubs): + Optional. Azure Event Hubs. + + This field is a member of `oneof`_ ``source``. + aws_msk (google.pubsub_v1.types.IngestionDataSourceSettings.AwsMsk): + Optional. Amazon MSK. + + This field is a member of `oneof`_ ``source``. + confluent_cloud (google.pubsub_v1.types.IngestionDataSourceSettings.ConfluentCloud): + Optional. Confluent Cloud. + + This field is a member of `oneof`_ ``source``. + platform_logs_settings (google.pubsub_v1.types.PlatformLogsSettings): + Optional. Platform Logs settings. 
If unset,
+ no Platform Logs will be generated.
+ """
+
+ class AwsKinesis(proto.Message):
+ r"""Ingestion settings for Amazon Kinesis Data Streams.
+
+ Attributes:
+ state (google.pubsub_v1.types.IngestionDataSourceSettings.AwsKinesis.State):
+ Output only. An output-only field that
+ indicates the state of the Kinesis ingestion
+ source.
+ stream_arn (str):
+ Required. The Kinesis stream ARN to ingest
+ data from.
+ consumer_arn (str):
+ Required. The Kinesis consumer ARN to be used
+ for ingestion in Enhanced Fan-Out mode. The
+ consumer must already be created and ready to be
+ used.
+ aws_role_arn (str):
+ Required. AWS role ARN to be used for
+ Federated Identity authentication with Kinesis.
+ Check the Pub/Sub docs for how to set up this
+ role and the required permissions that need to
+ be attached to it.
+ gcp_service_account (str):
+ Required. The GCP service account to be used for Federated
+ Identity authentication with Kinesis (via an
+ ``AssumeRoleWithWebIdentity`` call for the provided role).
+ The ``aws_role_arn`` must be set up with
+ ``accounts.google.com:sub`` equal to this service account
+ number.
+ """
+
+ class State(proto.Enum):
+ r"""Possible states for ingestion from Amazon Kinesis Data
+ Streams.
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ Default value. This value is unused.
+ ACTIVE (1):
+ Ingestion is active.
+ KINESIS_PERMISSION_DENIED (2):
+ Permission denied encountered while consuming data from
+ Kinesis. This can happen if:
+
+ - The provided ``aws_role_arn`` does not exist or does not
+ have the appropriate permissions attached.
+ - The provided ``aws_role_arn`` is not set up properly for
+ Identity Federation using ``gcp_service_account``.
+ - The Pub/Sub SA is not granted the
+ ``iam.serviceAccounts.getOpenIdToken`` permission on
+ ``gcp_service_account``.
+ PUBLISH_PERMISSION_DENIED (3):
+ Permission denied encountered while publishing to the topic.
+ This can happen if the Pub/Sub SA has not been granted the
+ `appropriate publish
+ permissions `__
+ STREAM_NOT_FOUND (4):
+ The Kinesis stream does not exist.
+ CONSUMER_NOT_FOUND (5):
+ The Kinesis consumer does not exist.
+ """
+ STATE_UNSPECIFIED = 0
+ ACTIVE = 1
+ KINESIS_PERMISSION_DENIED = 2
+ PUBLISH_PERMISSION_DENIED = 3
+ STREAM_NOT_FOUND = 4
+ CONSUMER_NOT_FOUND = 5
+
+ state: "IngestionDataSourceSettings.AwsKinesis.State" = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum="IngestionDataSourceSettings.AwsKinesis.State",
+ )
+ stream_arn: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ consumer_arn: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ aws_role_arn: str = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ gcp_service_account: str = proto.Field(
+ proto.STRING,
+ number=5,
+ )
+
+ class CloudStorage(proto.Message):
+ r"""Ingestion settings for Cloud Storage.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ state (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.State):
+ Output only. An output-only field that
+ indicates the state of the Cloud Storage
+ ingestion source.
+ bucket (str):
+ Optional. Cloud Storage bucket. The bucket name must be
+ without any prefix like "gs://". See the [bucket naming
+ requirements]
+ (https://cloud.google.com/storage/docs/buckets#naming).
+ text_format (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.TextFormat):
+ Optional. Data from Cloud Storage will be
+ interpreted as text.
+
+ This field is a member of `oneof`_ ``input_format``.
+ avro_format (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.AvroFormat):
+ Optional. Data from Cloud Storage will be
+ interpreted in Avro format.
+
+ This field is a member of `oneof`_ ``input_format``.
+ pubsub_avro_format (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat):
+ Optional. Data from Cloud Storage will be assumed to have
+ been written via `Cloud Storage
+ subscriptions `__.
+
+ This field is a member of `oneof`_ ``input_format``.
+ minimum_object_create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Optional. Only objects with a larger or equal
+ creation timestamp will be ingested.
+ match_glob (str):
+ Optional. Glob pattern used to match objects that will be
+ ingested. If unset, all objects will be ingested. See the
+ `supported
+ patterns `__.
+ """
+
+ class State(proto.Enum):
+ r"""Possible states for ingestion from Cloud Storage.
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ Default value. This value is unused.
+ ACTIVE (1):
+ Ingestion is active.
+ CLOUD_STORAGE_PERMISSION_DENIED (2):
+ Permission denied encountered while calling the Cloud
+ Storage API. This can happen if the Pub/Sub SA has not been
+ granted the `appropriate
+ permissions `__:
+
+ - storage.objects.list: to list the objects in a bucket.
+ - storage.objects.get: to read the objects in a bucket.
+ - storage.buckets.get: to verify the bucket exists.
+ PUBLISH_PERMISSION_DENIED (3):
+ Permission denied encountered while publishing to the topic.
+ This can happen if the Pub/Sub SA has not been granted the
+ `appropriate publish
+ permissions `__
+ BUCKET_NOT_FOUND (4):
+ The provided Cloud Storage bucket doesn't
+ exist.
+ TOO_MANY_OBJECTS (5):
+ The Cloud Storage bucket has too many
+ objects; ingestion will be paused.
+ """
+ STATE_UNSPECIFIED = 0
+ ACTIVE = 1
+ CLOUD_STORAGE_PERMISSION_DENIED = 2
+ PUBLISH_PERMISSION_DENIED = 3
+ BUCKET_NOT_FOUND = 4
+ TOO_MANY_OBJECTS = 5
+
+ class TextFormat(proto.Message):
+ r"""Configuration for reading Cloud Storage data in text format. Each
+ line of text as specified by the delimiter will be set to the
+ ``data`` field of a Pub/Sub message.
+
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ delimiter (str):
+ Optional. When unset, '\n' is used.
+
+ This field is a member of `oneof`_ ``_delimiter``.
+ """
+
+ delimiter: str = proto.Field(
+ proto.STRING,
+ number=1,
+ optional=True,
+ )
+
+ class AvroFormat(proto.Message):
+ r"""Configuration for reading Cloud Storage data in Avro binary format.
+ The bytes of each object will be set to the ``data`` field of a
+ Pub/Sub message.
+
+ """
+
+ class PubSubAvroFormat(proto.Message):
+ r"""Configuration for reading Cloud Storage data written via `Cloud
+ Storage
+ subscriptions `__.
+ The data and attributes fields of the originally exported Pub/Sub
+ message will be restored when publishing.
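+
+ A minimal usage sketch (the type names are those defined in
+ this module; the bucket name is hypothetical):
+
+ ::
+
+ # Setting the empty PubSubAvroFormat message selects the
+ # "pubsub_avro_format" branch of the input_format oneof.
+ cloud_storage = IngestionDataSourceSettings.CloudStorage(
+ bucket="example-bucket",
+ pubsub_avro_format=IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat(),
+ )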
+
+ """
+
+ state: "IngestionDataSourceSettings.CloudStorage.State" = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum="IngestionDataSourceSettings.CloudStorage.State",
+ )
+ bucket: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ text_format: "IngestionDataSourceSettings.CloudStorage.TextFormat" = (
+ proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof="input_format",
+ message="IngestionDataSourceSettings.CloudStorage.TextFormat",
+ )
+ )
+ avro_format: "IngestionDataSourceSettings.CloudStorage.AvroFormat" = (
+ proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof="input_format",
+ message="IngestionDataSourceSettings.CloudStorage.AvroFormat",
+ )
+ )
+ pubsub_avro_format: "IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="input_format",
+ message="IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat",
+ )
+ minimum_object_create_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ message=timestamp_pb2.Timestamp,
+ )
+ match_glob: str = proto.Field(
+ proto.STRING,
+ number=9,
+ )
+
+ class AzureEventHubs(proto.Message):
+ r"""Ingestion settings for Azure Event Hubs.
+
+ Attributes:
+ state (google.pubsub_v1.types.IngestionDataSourceSettings.AzureEventHubs.State):
+ Output only. An output-only field that
+ indicates the state of the Event Hubs ingestion
+ source.
+ resource_group (str):
+ Optional. Name of the resource group within
+ the Azure subscription.
+ namespace (str):
+ Optional. The name of the Event Hubs
+ namespace.
+ event_hub (str):
+ Optional. The name of the Event Hub.
+ client_id (str):
+ Optional. The client id of the Azure
+ application that is being used to authenticate
+ Pub/Sub.
+ tenant_id (str):
+ Optional. The tenant id of the Azure
+ application that is being used to authenticate
+ Pub/Sub.
+ subscription_id (str):
+ Optional. The Azure subscription id.
+ gcp_service_account (str):
+ Optional. The GCP service account to be used
+ for Federated Identity authentication.
+ """
+
+ class State(proto.Enum):
+ r"""Possible states for managed ingestion from Event Hubs.
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ Default value. This value is unused.
+ ACTIVE (1):
+ Ingestion is active.
+ EVENT_HUBS_PERMISSION_DENIED (2):
+ Permission denied encountered while consuming data from
+ Event Hubs. This can happen when ``client_id`` or
+ ``tenant_id`` is invalid, or when the right permissions
+ haven't been granted.
+ PUBLISH_PERMISSION_DENIED (3):
+ Permission denied encountered while
+ publishing to the topic.
+ NAMESPACE_NOT_FOUND (4):
+ The provided Event Hubs namespace couldn't be
+ found.
+ EVENT_HUB_NOT_FOUND (5):
+ The provided Event Hub couldn't be found.
+ SUBSCRIPTION_NOT_FOUND (6):
+ The provided Event Hubs subscription couldn't
+ be found.
+ RESOURCE_GROUP_NOT_FOUND (7):
+ The provided Event Hubs resource group
+ couldn't be found.
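+
+ A short sketch of checking the reported ingestion state
+ (assumes ``topic`` is a ``Topic`` previously fetched with
+ ``PublisherClient.get_topic``):
+
+ ::
+
+ state = topic.ingestion_data_source_settings.azure_event_hubs.state
+ if state != IngestionDataSourceSettings.AzureEventHubs.State.ACTIVE:
+ # Enum members are IntEnum values, so .name gives the label.
+ print("Event Hubs ingestion is unhealthy:", state.name)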
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + EVENT_HUBS_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + NAMESPACE_NOT_FOUND = 4 + EVENT_HUB_NOT_FOUND = 5 + SUBSCRIPTION_NOT_FOUND = 6 + RESOURCE_GROUP_NOT_FOUND = 7 + + state: "IngestionDataSourceSettings.AzureEventHubs.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.AzureEventHubs.State", + ) + resource_group: str = proto.Field( + proto.STRING, + number=2, + ) + namespace: str = proto.Field( + proto.STRING, + number=3, + ) + event_hub: str = proto.Field( + proto.STRING, + number=4, + ) + client_id: str = proto.Field( + proto.STRING, + number=5, + ) + tenant_id: str = proto.Field( + proto.STRING, + number=6, + ) + subscription_id: str = proto.Field( + proto.STRING, + number=7, + ) + gcp_service_account: str = proto.Field( + proto.STRING, + number=8, + ) + + class AwsMsk(proto.Message): + r"""Ingestion settings for Amazon MSK. + + Attributes: + state (google.pubsub_v1.types.IngestionDataSourceSettings.AwsMsk.State): + Output only. An output-only field that + indicates the state of the Amazon MSK ingestion + source. + cluster_arn (str): + Required. The Amazon Resource Name (ARN) that + uniquely identifies the cluster. + topic (str): + Required. The name of the topic in the Amazon + MSK cluster that Pub/Sub will import from. + aws_role_arn (str): + Required. AWS role ARN to be used for + Federated Identity authentication with Amazon + MSK. Check the Pub/Sub docs for how to set up + this role and the required permissions that need + to be attached to it. + gcp_service_account (str): + Required. The GCP service account to be used for Federated + Identity authentication with Amazon MSK (via a + ``AssumeRoleWithWebIdentity`` call for the provided role). + The ``aws_role_arn`` must be set up with + ``accounts.google.com:sub`` equals to this service account + number. + """ + + class State(proto.Enum): + r"""Possible states for managed ingestion from Amazon MSK. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + Ingestion is active. + MSK_PERMISSION_DENIED (2): + Permission denied encountered while consuming + data from Amazon MSK. + PUBLISH_PERMISSION_DENIED (3): + Permission denied encountered while + publishing to the topic. + CLUSTER_NOT_FOUND (4): + The provided MSK cluster wasn't found. + TOPIC_NOT_FOUND (5): + The provided topic wasn't found. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + MSK_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + CLUSTER_NOT_FOUND = 4 + TOPIC_NOT_FOUND = 5 + + state: "IngestionDataSourceSettings.AwsMsk.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.AwsMsk.State", + ) + cluster_arn: str = proto.Field( + proto.STRING, + number=2, + ) + topic: str = proto.Field( + proto.STRING, + number=3, + ) + aws_role_arn: str = proto.Field( + proto.STRING, + number=4, + ) + gcp_service_account: str = proto.Field( + proto.STRING, + number=5, + ) + + class ConfluentCloud(proto.Message): + r"""Ingestion settings for Confluent Cloud. + + Attributes: + state (google.pubsub_v1.types.IngestionDataSourceSettings.ConfluentCloud.State): + Output only. An output-only field that + indicates the state of the Confluent Cloud + ingestion source. + bootstrap_server (str): + Required. The address of the bootstrap + server. The format is url:port. + cluster_id (str): + Required. The id of the cluster. + topic (str): + Required. The name of the topic in the + Confluent Cloud cluster that Pub/Sub will import + from. 
+ identity_pool_id (str): + Required. The id of the identity pool to be + used for Federated Identity authentication with + Confluent Cloud. See + https://docs.confluent.io/cloud/current/security/authenticate/workload-identities/identity-providers/oauth/identity-pools.html#add-oauth-identity-pools. + gcp_service_account (str): + Required. The GCP service account to be used for Federated + Identity authentication with ``identity_pool_id``. + """ + + class State(proto.Enum): + r"""Possible states for managed ingestion from Confluent Cloud. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + Ingestion is active. + CONFLUENT_CLOUD_PERMISSION_DENIED (2): + Permission denied encountered while consuming + data from Confluent Cloud. + PUBLISH_PERMISSION_DENIED (3): + Permission denied encountered while + publishing to the topic. + UNREACHABLE_BOOTSTRAP_SERVER (4): + The provided bootstrap server address is + unreachable. + CLUSTER_NOT_FOUND (5): + The provided cluster wasn't found. + TOPIC_NOT_FOUND (6): + The provided topic wasn't found. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CONFLUENT_CLOUD_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + UNREACHABLE_BOOTSTRAP_SERVER = 4 + CLUSTER_NOT_FOUND = 5 + TOPIC_NOT_FOUND = 6 + + state: "IngestionDataSourceSettings.ConfluentCloud.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.ConfluentCloud.State", + ) + bootstrap_server: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + topic: str = proto.Field( + proto.STRING, + number=4, + ) + identity_pool_id: str = proto.Field( + proto.STRING, + number=5, + ) + gcp_service_account: str = proto.Field( + proto.STRING, + number=6, + ) + + aws_kinesis: AwsKinesis = proto.Field( + proto.MESSAGE, + number=1, + oneof="source", + message=AwsKinesis, + ) + cloud_storage: CloudStorage = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=CloudStorage, + ) + azure_event_hubs: AzureEventHubs = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message=AzureEventHubs, + ) + aws_msk: AwsMsk = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=AwsMsk, + ) + confluent_cloud: ConfluentCloud = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=ConfluentCloud, + ) + platform_logs_settings: "PlatformLogsSettings" = proto.Field( + proto.MESSAGE, + number=4, + message="PlatformLogsSettings", + ) + + +class PlatformLogsSettings(proto.Message): + r"""Settings for Platform Logs produced by Pub/Sub. + + Attributes: + severity (google.pubsub_v1.types.PlatformLogsSettings.Severity): + Optional. The minimum severity level of + Platform Logs that will be written. + """ + + class Severity(proto.Enum): + r"""Severity levels of Platform Logs. + + Values: + SEVERITY_UNSPECIFIED (0): + Default value. Logs level is unspecified. + Logs will be disabled. + DISABLED (1): + Logs will be disabled. + DEBUG (2): + Debug logs and higher-severity logs will be + written. + INFO (3): + Info logs and higher-severity logs will be + written. + WARNING (4): + Warning logs and higher-severity logs will be + written. + ERROR (5): + Only error logs will be written. 
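+
+ A minimal configuration sketch (type names as defined in this
+ module):
+
+ ::
+
+ # Emit WARNING and more severe Platform Logs for ingestion.
+ logs = PlatformLogsSettings(
+ severity=PlatformLogsSettings.Severity.WARNING,
+ )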
+ """ + SEVERITY_UNSPECIFIED = 0 + DISABLED = 1 + DEBUG = 2 + INFO = 3 + WARNING = 4 + ERROR = 5 + + severity: Severity = proto.Field( + proto.ENUM, + number=1, + enum=Severity, + ) + + +class IngestionFailureEvent(proto.Message): + r"""Payload of the Platform Log entry sent when a failure is + encountered while ingesting. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + topic (str): + Required. Name of the import topic. Format is: + projects/{project_name}/topics/{topic_name}. + error_message (str): + Required. Error details explaining why + ingestion to Pub/Sub has failed. + cloud_storage_failure (google.pubsub_v1.types.IngestionFailureEvent.CloudStorageFailure): + Optional. Failure when ingesting from Cloud + Storage. + + This field is a member of `oneof`_ ``failure``. + aws_msk_failure (google.pubsub_v1.types.IngestionFailureEvent.AwsMskFailureReason): + Optional. Failure when ingesting from Amazon + MSK. + + This field is a member of `oneof`_ ``failure``. + azure_event_hubs_failure (google.pubsub_v1.types.IngestionFailureEvent.AzureEventHubsFailureReason): + Optional. Failure when ingesting from Azure + Event Hubs. + + This field is a member of `oneof`_ ``failure``. + confluent_cloud_failure (google.pubsub_v1.types.IngestionFailureEvent.ConfluentCloudFailureReason): + Optional. Failure when ingesting from + Confluent Cloud. + + This field is a member of `oneof`_ ``failure``. + aws_kinesis_failure (google.pubsub_v1.types.IngestionFailureEvent.AwsKinesisFailureReason): + Optional. Failure when ingesting from AWS + Kinesis. + + This field is a member of `oneof`_ ``failure``. + """ + + class ApiViolationReason(proto.Message): + r"""Specifies the reason why some data may have been left out of the + desired Pub/Sub message due to the API message limits + (https://cloud.google.com/pubsub/quotas#resource_limits). For + example, when the number of attributes is larger than 100, the + number of attributes is truncated to 100 to respect the limit on the + attribute count. Other attribute limits are treated similarly. When + the size of the desired message would've been larger than 10MB, the + message won't be published at all, and ingestion of the subsequent + messages will proceed as normal. + + """ + + class AvroFailureReason(proto.Message): + r"""Set when an Avro file is unsupported or its format is not + valid. When this occurs, one or more Avro objects won't be + ingested. + + """ + + class SchemaViolationReason(proto.Message): + r"""Set when a Pub/Sub message fails to get published due to a + schema validation violation. + + """ + + class MessageTransformationFailureReason(proto.Message): + r"""Set when a Pub/Sub message fails to get published due to a + message transformation error. + + """ + + class CloudStorageFailure(proto.Message): + r"""Failure when ingesting from a Cloud Storage source. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bucket (str): + Optional. 
Name of the Cloud Storage bucket + used for ingestion. + object_name (str): + Optional. Name of the Cloud Storage object + which contained the section that couldn't be + ingested. + object_generation (int): + Optional. Generation of the Cloud Storage + object which contained the section that couldn't + be ingested. + avro_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.AvroFailureReason): + Optional. Failure encountered when parsing an + Avro file. + + This field is a member of `oneof`_ ``reason``. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. + + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + + This field is a member of `oneof`_ ``reason``. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_name: str = proto.Field( + proto.STRING, + number=2, + ) + object_generation: int = proto.Field( + proto.INT64, + number=3, + ) + avro_failure_reason: "IngestionFailureEvent.AvroFailureReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.AvroFailureReason", + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=8, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) + + class AwsMskFailureReason(proto.Message): + r"""Failure when ingesting from an Amazon MSK source. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cluster_arn (str): + Optional. The ARN of the cluster of the topic + being ingested from. + kafka_topic (str): + Optional. The name of the Kafka topic being + ingested from. + partition_id (int): + Optional. The partition ID of the message + that failed to be ingested. + offset (int): + Optional. The offset within the partition of + the message that failed to be ingested. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. + + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + + This field is a member of `oneof`_ ``reason``. 
+ message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + + This field is a member of `oneof`_ ``reason``. + """ + + cluster_arn: str = proto.Field( + proto.STRING, + number=1, + ) + kafka_topic: str = proto.Field( + proto.STRING, + number=2, + ) + partition_id: int = proto.Field( + proto.INT64, + number=3, + ) + offset: int = proto.Field( + proto.INT64, + number=4, + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) + + class AzureEventHubsFailureReason(proto.Message): + r"""Failure when ingesting from an Azure Event Hubs source. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + namespace (str): + Optional. The namespace containing the event + hub being ingested from. + event_hub (str): + Optional. The name of the event hub being + ingested from. + partition_id (int): + Optional. The partition ID of the message + that failed to be ingested. + offset (int): + Optional. The offset within the partition of + the message that failed to be ingested. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. + + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + + This field is a member of `oneof`_ ``reason``. 
+ """ + + namespace: str = proto.Field( + proto.STRING, + number=1, + ) + event_hub: str = proto.Field( + proto.STRING, + number=2, + ) + partition_id: int = proto.Field( + proto.INT64, + number=3, + ) + offset: int = proto.Field( + proto.INT64, + number=4, + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) + + class ConfluentCloudFailureReason(proto.Message): + r"""Failure when ingesting from a Confluent Cloud source. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cluster_id (str): + Optional. The cluster ID containing the topic + being ingested from. + kafka_topic (str): + Optional. The name of the Kafka topic being + ingested from. + partition_id (int): + Optional. The partition ID of the message + that failed to be ingested. + offset (int): + Optional. The offset within the partition of + the message that failed to be ingested. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. + + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + + This field is a member of `oneof`_ ``reason``. + """ + + cluster_id: str = proto.Field( + proto.STRING, + number=1, + ) + kafka_topic: str = proto.Field( + proto.STRING, + number=2, + ) + partition_id: int = proto.Field( + proto.INT64, + number=3, + ) + offset: int = proto.Field( + proto.INT64, + number=4, + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) + + class AwsKinesisFailureReason(proto.Message): + r"""Failure when ingesting from an AWS Kinesis source. + + This message has `oneof`_ fields (mutually exclusive fields). 
For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ stream_arn (str):
+ Optional. The stream ARN of the Kinesis
+ stream being ingested from.
+ partition_key (str):
+ Optional. The partition key of the message
+ that failed to be ingested.
+ sequence_number (str):
+ Optional. The sequence number of the message
+ that failed to be ingested.
+ schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason):
+ Optional. The Pub/Sub message failed schema
+ validation.
+
+ This field is a member of `oneof`_ ``reason``.
+ message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason):
+ Optional. Failure encountered when applying a
+ message transformation to the Pub/Sub message.
+
+ This field is a member of `oneof`_ ``reason``.
+ api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason):
+ Optional. The message failed to be published
+ due to an API violation. This is only set when
+ the size of the data field of the Kinesis record
+ is zero.
+
+ This field is a member of `oneof`_ ``reason``.
+ """
+
+ stream_arn: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ partition_key: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ sequence_number: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = (
+ proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof="reason",
+ message="IngestionFailureEvent.SchemaViolationReason",
+ )
+ )
+ message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="reason",
+ message="IngestionFailureEvent.MessageTransformationFailureReason",
+ )
+ api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="reason",
+ message="IngestionFailureEvent.ApiViolationReason",
+ )
+
+ topic: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ error_message: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ cloud_storage_failure: CloudStorageFailure = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof="failure",
+ message=CloudStorageFailure,
+ )
+ aws_msk_failure: AwsMskFailureReason = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof="failure",
+ message=AwsMskFailureReason,
+ )
+ azure_event_hubs_failure: AzureEventHubsFailureReason = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof="failure",
+ message=AzureEventHubsFailureReason,
+ )
+ confluent_cloud_failure: ConfluentCloudFailureReason = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="failure",
+ message=ConfluentCloudFailureReason,
+ )
+ aws_kinesis_failure: AwsKinesisFailureReason = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="failure",
+ message=AwsKinesisFailureReason,
+ )
+
+
+class JavaScriptUDF(proto.Message):
+ r"""User-defined JavaScript function that can transform or filter
+ a Pub/Sub message.
+
+ Attributes:
+ function_name (str):
+ Required. Name of the JavaScript function
+ that should be applied to Pub/Sub messages.
+ code (str):
+ Required. JavaScript code that contains a function
+ ``function_name`` with the below signature:
+
+ ::
+
+ /**
+ * Transforms a Pub/Sub message.
+
+ * @return {(Object<string, (string | Object<string, string>)>|null)} - To
+ * filter a message, return `null`. To transform a message return a map
+ * with the following keys:
+ * - (required) 'data' : {string}
+ * - (optional) 'attributes' : {Object<string, string>}
+ * Returning empty `attributes` will remove all attributes from the
+ * message.
+ *
+ * @param {(Object<string, (string | Object<string, string>)>} Pub/Sub
+ * message. Keys:
+ * - (required) 'data' : {string}
+ * - (required) 'attributes' : {Object<string, string>}
+ *
+ * @param {Object<string, any>} metadata - Pub/Sub message metadata.
+ * Keys:
+ * - (optional) 'message_id' : {string}
+ * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format
+ * - (optional) 'ordering_key': {string}
+ */
+
+ function <function_name>(message, metadata) {
+ }
+ """
+
+ function_name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ code: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class MessageTransform(proto.Message):
+ r"""All supported message transform types.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ javascript_udf (google.pubsub_v1.types.JavaScriptUDF):
+ Optional. JavaScript User Defined Function. If multiple
+ JavaScriptUDFs are specified on a resource, each must have
+ a unique ``function_name``.
+
+ This field is a member of `oneof`_ ``transform``.
+ enabled (bool):
+ Optional. This field is deprecated; use the ``disabled``
+ field to disable transforms.
+ disabled (bool):
+ Optional. If true, the transform is disabled and will not be
+ applied to messages. Defaults to ``false``.
+ """
+
+ javascript_udf: "JavaScriptUDF" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof="transform",
+ message="JavaScriptUDF",
+ )
+ enabled: bool = proto.Field(
+ proto.BOOL,
+ number=3,
+ )
+ disabled: bool = proto.Field(
+ proto.BOOL,
+ number=4,
+ )
+
+
+class Topic(proto.Message):
+ r"""A topic resource.
+
+ Attributes:
+ name (str):
+ Required. The name of the topic. It must have the format
+ ``"projects/{project}/topics/{topic}"``. ``{topic}`` must
+ start with a letter, and contain only letters
+ (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``),
+ underscores (``_``), periods (``.``), tildes (``~``), plus
+ (``+``) or percent signs (``%``). It must be between 3 and
+ 255 characters in length, and it must not start with
+ ``"goog"``.
+ labels (MutableMapping[str, str]):
+ Optional. See [Creating and managing labels]
+ (https://cloud.google.com/pubsub/docs/labels).
+ message_storage_policy (google.pubsub_v1.types.MessageStoragePolicy):
+ Optional. Policy constraining the set of
+ Google Cloud Platform regions where messages
+ published to the topic may be stored. If not
+ present, then no constraints are in effect.
+ kms_key_name (str):
+ Optional. The resource name of the Cloud KMS CryptoKey to be
+ used to protect access to messages published on this topic.
+
+ The expected format is
+ ``projects/*/locations/*/keyRings/*/cryptoKeys/*``.
+ schema_settings (google.pubsub_v1.types.SchemaSettings):
+ Optional. Settings for validating messages
+ published against a schema.
+ satisfies_pzs (bool):
+ Optional. Reserved for future use. This field
+ is set only in responses from the server; it is
+ ignored if it is set in any requests.
+ message_retention_duration (google.protobuf.duration_pb2.Duration):
+ Optional. Indicates the minimum duration to retain a message
+ after it is published to the topic. If this field is set,
+ messages published to the topic in the last
+ ``message_retention_duration`` are always available to
+ subscribers.
For instance, it allows any attached + subscription to `seek to a + timestamp `__ + that is up to ``message_retention_duration`` in the past. If + this field is not set, message retention is controlled by + settings on individual subscriptions. Cannot be more than 31 + days or less than 10 minutes. + state (google.pubsub_v1.types.Topic.State): + Output only. An output-only field indicating + the state of the topic. + ingestion_data_source_settings (google.pubsub_v1.types.IngestionDataSourceSettings): + Optional. Settings for ingestion from a data + source into this topic. + message_transforms (MutableSequence[google.pubsub_v1.types.MessageTransform]): + Optional. Transforms to be applied to + messages published to the topic. Transforms are + applied in the order specified. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag + keys/values directly bound to this resource. For + example: + + "123/environment": "production", + "123/costCenter": "marketing". + """ + + class State(proto.Enum): + r"""The state of the topic. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + The topic does not have any persistent + errors. + INGESTION_RESOURCE_ERROR (2): + Ingestion from the data source has + encountered a permanent error. See the more + detailed error state in the corresponding + ingestion source configuration. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + INGESTION_RESOURCE_ERROR = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + message_storage_policy: "MessageStoragePolicy" = proto.Field( + proto.MESSAGE, + number=3, + message="MessageStoragePolicy", + ) + kms_key_name: str = proto.Field( + proto.STRING, + number=5, + ) + schema_settings: "SchemaSettings" = proto.Field( + proto.MESSAGE, + number=6, + message="SchemaSettings", + ) + satisfies_pzs: bool = proto.Field( + proto.BOOL, + number=7, + ) + message_retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + ingestion_data_source_settings: "IngestionDataSourceSettings" = proto.Field( + proto.MESSAGE, + number=10, + message="IngestionDataSourceSettings", + ) + message_transforms: MutableSequence["MessageTransform"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="MessageTransform", + ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) + + +class PubsubMessage(proto.Message): + r"""A message that is published by publishers and consumed by + subscribers. The message must contain either a non-empty data field + or at least one attribute. Note that client libraries represent this + object differently depending on the language. See the corresponding + `client library + documentation `__ + for more information. See [quotas and limits] + (https://cloud.google.com/pubsub/quotas) for more information about + message limits. + + Attributes: + data (bytes): + Optional. The message data field. If this + field is empty, the message must contain at + least one attribute. + attributes (MutableMapping[str, str]): + Optional. Attributes for this message. If + this field is empty, the message must contain + non-empty data. This can be used to filter + messages on the subscription. + message_id (str): + ID of this message, assigned by the server when the message + is published. 
Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` + call. + publish_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the message was published, populated by + the server when it receives the ``Publish`` call. It must + not be populated by the publisher in a ``Publish`` call. + ordering_key (str): + Optional. If non-empty, identifies related messages for + which publish order should be respected. If a + ``Subscription`` has ``enable_message_ordering`` set to + ``true``, messages published with the same non-empty + ``ordering_key`` value will be delivered to subscribers in + the order in which they are received by the Pub/Sub system. + All ``PubsubMessage``\ s published in a given + ``PublishRequest`` must specify the same ``ordering_key`` + value. For more information, see `ordering + messages `__. + """ + + data: bytes = proto.Field( + proto.BYTES, + number=1, + ) + attributes: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + message_id: str = proto.Field( + proto.STRING, + number=3, + ) + publish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + ordering_key: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetTopicRequest(proto.Message): + r"""Request for the GetTopic method. + + Attributes: + topic (str): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTopicRequest(proto.Message): + r"""Request for the UpdateTopic method. + + Attributes: + topic (google.pubsub_v1.types.Topic): + Required. The updated topic object. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the provided topic to + update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message_storage_policy" but the + ``message_storage_policy`` is not set in the ``topic`` + provided above, then the updated value is determined by the + policy configured at the project or organization level. + """ + + topic: "Topic" = proto.Field( + proto.MESSAGE, + number=1, + message="Topic", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class PublishRequest(proto.Message): + r"""Request for the Publish method. + + Attributes: + topic (str): + Required. The messages in the request will be published on + this topic. Format is ``projects/{project}/topics/{topic}``. + messages (MutableSequence[google.pubsub_v1.types.PubsubMessage]): + Required. The messages to publish. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + messages: MutableSequence["PubsubMessage"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="PubsubMessage", + ) + + +class PublishResponse(proto.Message): + r"""Response for the ``Publish`` method. + + Attributes: + message_ids (MutableSequence[str]): + Optional. The server-assigned ID of each + published message, in the same order as the + messages in the request. IDs are guaranteed to + be unique within the topic. + """ + + message_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class ListTopicsRequest(proto.Message): + r"""Request for the ``ListTopics`` method. 
+ + Attributes: + project (str): + Required. The name of the project in which to list topics. + Format is ``projects/{project-id}``. + page_size (int): + Optional. Maximum number of topics to return. + page_token (str): + Optional. The value returned by the last + ``ListTopicsResponse``; indicates that this is a + continuation of a prior ``ListTopics`` call, and that the + system should return the next page of data. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTopicsResponse(proto.Message): + r"""Response for the ``ListTopics`` method. + + Attributes: + topics (MutableSequence[google.pubsub_v1.types.Topic]): + Optional. The resulting topics. + next_page_token (str): + Optional. If not empty, indicates that there may be more + topics that match the request; this value should be passed + in a new ``ListTopicsRequest``. + """ + + @property + def raw_page(self): + return self + + topics: MutableSequence["Topic"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Topic", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTopicSubscriptionsRequest(proto.Message): + r"""Request for the ``ListTopicSubscriptions`` method. + + Attributes: + topic (str): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + page_size (int): + Optional. Maximum number of subscription + names to return. + page_token (str): + Optional. The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTopicSubscriptionsResponse(proto.Message): + r"""Response for the ``ListTopicSubscriptions`` method. + + Attributes: + subscriptions (MutableSequence[str]): + Optional. The names of subscriptions attached + to the topic specified in the request. + next_page_token (str): + Optional. If not empty, indicates that there may be more + subscriptions that match the request; this value should be + passed in a new ``ListTopicSubscriptionsRequest`` to get + more subscriptions. + """ + + @property + def raw_page(self): + return self + + subscriptions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTopicSnapshotsRequest(proto.Message): + r"""Request for the ``ListTopicSnapshots`` method. + + Attributes: + topic (str): + Required. The name of the topic that snapshots are attached + to. Format is ``projects/{project}/topics/{topic}``. + page_size (int): + Optional. Maximum number of snapshot names to + return. + page_token (str): + Optional. The value returned by the last + ``ListTopicSnapshotsResponse``; indicates that this is a + continuation of a prior ``ListTopicSnapshots`` call, and + that the system should return the next page of data. 
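+
+ A short usage sketch (assumes a ``PublisherClient`` named
+ ``client`` and a ``topic_name`` string; the generated client
+ returns a pager that handles ``page_token`` internally):
+
+ ::
+
+ request = ListTopicSnapshotsRequest(topic=topic_name, page_size=100)
+ # The pager yields snapshot names across all pages.
+ for snapshot_name in client.list_topic_snapshots(request=request):
+ print(snapshot_name)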
+ """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTopicSnapshotsResponse(proto.Message): + r"""Response for the ``ListTopicSnapshots`` method. + + Attributes: + snapshots (MutableSequence[str]): + Optional. The names of the snapshots that + match the request. + next_page_token (str): + Optional. If not empty, indicates that there may be more + snapshots that match the request; this value should be + passed in a new ``ListTopicSnapshotsRequest`` to get more + snapshots. + """ + + @property + def raw_page(self): + return self + + snapshots: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteTopicRequest(proto.Message): + r"""Request for the ``DeleteTopic`` method. + + Attributes: + topic (str): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DetachSubscriptionRequest(proto.Message): + r"""Request for the DetachSubscription method. + + Attributes: + subscription (str): + Required. The subscription to detach. Format is + ``projects/{project}/subscriptions/{subscription}``. + """ + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DetachSubscriptionResponse(proto.Message): + r"""Response for the DetachSubscription method. + Reserved for future use. + + """ + + +class Subscription(proto.Message): + r"""A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, then the + subscriber will pull and ack messages using API methods. At most one + of these fields may be set. + + Attributes: + name (str): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + topic (str): + Required. The name of the topic from which this subscription + is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (google.pubsub_v1.types.PushConfig): + Optional. If push delivery is used with this + subscription, this field is used to configure + it. + bigquery_config (google.pubsub_v1.types.BigQueryConfig): + Optional. If delivery to BigQuery is used + with this subscription, this field is used to + configure it. + cloud_storage_config (google.pubsub_v1.types.CloudStorageConfig): + Optional. If delivery to Google Cloud Storage + is used with this subscription, this field is + used to configure it. + ack_deadline_seconds (int): + Optional. The approximate amount of time (on a best-effort + basis) Pub/Sub waits for the subscriber to acknowledge + receipt before resending the message. In the interval after + the message is delivered and before it is acknowledged, it + is considered to be *outstanding*. During that time period, + the message will not be redelivered (on a best-effort + basis). 
+
+ For pull subscriptions, this value is used as the initial
+ value for the ack deadline. To override this value for a
+ given message, call ``ModifyAckDeadline`` with the
+ corresponding ``ack_id`` if using non-streaming pull or send
+ the ``ack_id`` in a ``StreamingModifyAckDeadlineRequest`` if
+ using streaming pull. The minimum custom deadline you can
+ specify is 10 seconds. The maximum custom deadline you can
+ specify is 600 seconds (10 minutes). If this parameter is 0,
+ a default value of 10 seconds is used.
+
+ For push delivery, this value is also used to set the
+ request timeout for the call to the push endpoint.
+
+ If the subscriber never acknowledges the message, the
+ Pub/Sub system will eventually redeliver the message.
+ retain_acked_messages (bool):
+ Optional. Indicates whether to retain acknowledged messages.
+ If true, then messages are not expunged from the
+ subscription's backlog, even if they are acknowledged, until
+ they fall out of the ``message_retention_duration`` window.
+ This must be true if you would like to [``Seek`` to a
+ timestamp]
+ (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time)
+ in the past to replay previously-acknowledged messages.
+ message_retention_duration (google.protobuf.duration_pb2.Duration):
+ Optional. How long to retain unacknowledged messages in the
+ subscription's backlog, from the moment a message is
+ published. If ``retain_acked_messages`` is true, then this
+ also configures the retention of acknowledged messages, and
+ thus configures how far back in time a ``Seek`` can be done.
+ Defaults to 7 days. Cannot be more than 31 days or less than
+ 10 minutes.
+ labels (MutableMapping[str, str]):
+ Optional. See `Creating and managing
+ labels `__.
+ enable_message_ordering (bool):
+ Optional. If true, messages published with the same
+ ``ordering_key`` in ``PubsubMessage`` will be delivered to
+ the subscribers in the order in which they are received by
+ the Pub/Sub system. Otherwise, they may be delivered in any
+ order.
+ expiration_policy (google.pubsub_v1.types.ExpirationPolicy):
+ Optional. A policy that specifies the conditions for this
+ subscription's expiration. A subscription is considered
+ active as long as any connected subscriber is successfully
+ consuming messages from the subscription or is issuing
+ operations on the subscription. If ``expiration_policy`` is
+ not set, a *default policy* with ``ttl`` of 31 days will be
+ used. The minimum allowed value for
+ ``expiration_policy.ttl`` is 1 day. If ``expiration_policy``
+ is set, but ``expiration_policy.ttl`` is not set, the
+ subscription never expires.
+ filter (str):
+ Optional. An expression written in the Pub/Sub `filter
+ language `__.
+ If non-empty, then only ``PubsubMessage``\ s whose
+ ``attributes`` field matches the filter are delivered on
+ this subscription. If empty, then no messages are filtered
+ out.
+ dead_letter_policy (google.pubsub_v1.types.DeadLetterPolicy):
+ Optional. A policy that specifies the conditions for dead
+ lettering messages in this subscription. If
+ dead_letter_policy is not set, dead lettering is disabled.
+
+ The Pub/Sub service account associated with this
+ subscription's parent project (i.e.,
+ service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com)
+ must have permission to Acknowledge() messages on this
+ subscription.
+ retry_policy (google.pubsub_v1.types.RetryPolicy):
+ Optional. A policy that specifies how Pub/Sub
+ retries message delivery for this subscription.
+
+ If not set, the default retry policy is applied.
+ This generally implies that messages will be
+ retried as soon as possible for healthy
+ subscribers. RetryPolicy will be triggered on
+ NACKs or acknowledgment deadline exceeded events
+ for a given message.
+ detached (bool):
+ Optional. Indicates whether the subscription is detached
+ from its topic. Detached subscriptions don't receive
+ messages from their topic and don't retain any backlog.
+ ``Pull`` and ``StreamingPull`` requests will return
+ FAILED_PRECONDITION. If the subscription is a push
+ subscription, pushes to the endpoint will not be made.
+ enable_exactly_once_delivery (bool):
+ Optional. If true, Pub/Sub provides the following guarantees
+ for the delivery of a message with a given value of
+ ``message_id`` on this subscription:
+
+ - The message sent to a subscriber is guaranteed not to be
+ resent before the message's acknowledgment deadline
+ expires.
+ - An acknowledged message will not be resent to a
+ subscriber.
+
+ Note that subscribers may still receive multiple copies of a
+ message when ``enable_exactly_once_delivery`` is true if the
+ message was published multiple times by a publisher client.
+ These copies are considered distinct by Pub/Sub and have
+ distinct ``message_id`` values.
+ topic_message_retention_duration (google.protobuf.duration_pb2.Duration):
+ Output only. Indicates the minimum duration for which a
+ message is retained after it is published to the
+ subscription's topic. If this field is set, messages
+ published to the subscription's topic in the last
+ ``topic_message_retention_duration`` are always available to
+ subscribers. See the ``message_retention_duration`` field in
+ ``Topic``. This field is set only in responses from the
+ server; it is ignored if it is set in any requests.
+ state (google.pubsub_v1.types.Subscription.State):
+ Output only. An output-only field indicating
+ whether or not the subscription can receive
+ messages.
+ analytics_hub_subscription_info (google.pubsub_v1.types.Subscription.AnalyticsHubSubscriptionInfo):
+ Output only. Information about the associated
+ Analytics Hub subscription. Only set if the
+ subscription is created by Analytics Hub.
+ message_transforms (MutableSequence[google.pubsub_v1.types.MessageTransform]):
+ Optional. Transforms to be applied to
+ messages before they are delivered to
+ subscribers. Transforms are applied in the order
+ specified.
+ tags (MutableMapping[str, str]):
+ Optional. Input only. Immutable. Tag
+ keys/values directly bound to this resource. For
+ example:
+
+ "123/environment": "production",
+ "123/costCenter": "marketing".
+ """
+
+ class State(proto.Enum):
+ r"""Possible states for a subscription.
+
+ Values:
+ STATE_UNSPECIFIED (0):
+ Default value. This value is unused.
+ ACTIVE (1):
+ The subscription can actively receive
+ messages.
+ RESOURCE_ERROR (2):
+ The subscription cannot receive messages
+ because of an error with the resource to which
+ it pushes messages. See the more detailed error
+ state in the corresponding configuration.
+ """
+ STATE_UNSPECIFIED = 0
+ ACTIVE = 1
+ RESOURCE_ERROR = 2
+
+ class AnalyticsHubSubscriptionInfo(proto.Message):
+ r"""Information about an associated `Analytics Hub
+ subscription `__.
+
+ Attributes:
+ listing (str):
+ Optional. The name of the associated Analytics Hub listing
+ resource. Pattern:
+ "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".
+ subscription (str):
+ Optional.
The name of the associated + Analytics Hub subscription resource. Pattern: + + "projects/{project}/locations/{location}/subscriptions/{subscription}". + """ + + listing: str = proto.Field( + proto.STRING, + number=1, + ) + subscription: str = proto.Field( + proto.STRING, + number=2, + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + topic: str = proto.Field( + proto.STRING, + number=2, + ) + push_config: "PushConfig" = proto.Field( + proto.MESSAGE, + number=4, + message="PushConfig", + ) + bigquery_config: "BigQueryConfig" = proto.Field( + proto.MESSAGE, + number=18, + message="BigQueryConfig", + ) + cloud_storage_config: "CloudStorageConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="CloudStorageConfig", + ) + ack_deadline_seconds: int = proto.Field( + proto.INT32, + number=5, + ) + retain_acked_messages: bool = proto.Field( + proto.BOOL, + number=7, + ) + message_retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + enable_message_ordering: bool = proto.Field( + proto.BOOL, + number=10, + ) + expiration_policy: "ExpirationPolicy" = proto.Field( + proto.MESSAGE, + number=11, + message="ExpirationPolicy", + ) + filter: str = proto.Field( + proto.STRING, + number=12, + ) + dead_letter_policy: "DeadLetterPolicy" = proto.Field( + proto.MESSAGE, + number=13, + message="DeadLetterPolicy", + ) + retry_policy: "RetryPolicy" = proto.Field( + proto.MESSAGE, + number=14, + message="RetryPolicy", + ) + detached: bool = proto.Field( + proto.BOOL, + number=15, + ) + enable_exactly_once_delivery: bool = proto.Field( + proto.BOOL, + number=16, + ) + topic_message_retention_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=17, + message=duration_pb2.Duration, + ) + state: State = proto.Field( + proto.ENUM, + number=19, + enum=State, + ) + analytics_hub_subscription_info: AnalyticsHubSubscriptionInfo = proto.Field( + proto.MESSAGE, + number=23, + message=AnalyticsHubSubscriptionInfo, + ) + message_transforms: MutableSequence["MessageTransform"] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message="MessageTransform", + ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=26, + ) + + +class RetryPolicy(proto.Message): + r"""A policy that specifies how Pub/Sub retries message delivery. + + Retry delay will be exponential based on provided minimum and + maximum backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. + + RetryPolicy will be triggered on NACKs or acknowledgment deadline + exceeded events for a given message. + + Retry Policy is implemented on a best effort basis. At times, the + delay between consecutive deliveries may not match the + configuration. That is, delay can be more or less than configured + backoff. + + Attributes: + minimum_backoff (google.protobuf.duration_pb2.Duration): + Optional. The minimum delay between + consecutive deliveries of a given message. Value + should be between 0 and 600 seconds. Defaults to + 10 seconds. + maximum_backoff (google.protobuf.duration_pb2.Duration): + Optional. The maximum delay between + consecutive deliveries of a given message. Value + should be between 0 and 600 seconds. Defaults to + 600 seconds. 
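Aside, not part of the generated diff: the RetryPolicy message above maps directly onto the released google-cloud-pubsub surface. A minimal sketch of creating a subscription with exponential-backoff redelivery; the project, topic, and subscription names are hypothetical placeholders.

from google.cloud import pubsub_v1
from google.protobuf import duration_pb2

subscriber = pubsub_v1.SubscriberClient()
topic_path = subscriber.topic_path("my-project", "my-topic")  # hypothetical
subscription_path = subscriber.subscription_path("my-project", "my-sub")

subscriber.create_subscription(
    request={
        "name": subscription_path,
        "topic": topic_path,
        # Redeliveries back off exponentially within [minimum, maximum].
        "retry_policy": {
            "minimum_backoff": duration_pb2.Duration(seconds=10),
            "maximum_backoff": duration_pb2.Duration(seconds=600),
        },
    }
)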
+    """
+
+    minimum_backoff: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=duration_pb2.Duration,
+    )
+    maximum_backoff: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=duration_pb2.Duration,
+    )
+
+
+class DeadLetterPolicy(proto.Message):
+    r"""Dead lettering is done on a best effort basis. The same
+    message might be dead lettered multiple times.
+
+    If validation on any of the fields fails at subscription
+    creation or update, the create/update subscription request will
+    fail.
+
+    Attributes:
+        dead_letter_topic (str):
+            Optional. The name of the topic to which dead letter
+            messages should be published. Format is
+            ``projects/{project}/topics/{topic}``. The Pub/Sub service
+            account associated with the enclosing subscription's parent
+            project (i.e.,
+            service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com)
+            must have permission to Publish() to this topic.
+
+            The operation will fail if the topic does not exist. Users
+            should ensure that there is a subscription attached to this
+            topic since messages published to a topic with no
+            subscriptions are lost.
+        max_delivery_attempts (int):
+            Optional. The maximum number of delivery attempts for any
+            message. The value must be between 5 and 100.
+
+            The number of delivery attempts is defined as 1 + (the sum
+            of number of NACKs and number of times the acknowledgment
+            deadline has been exceeded for the message).
+
+            A NACK is any call to ModifyAckDeadline with a 0 deadline.
+            Note that client libraries may automatically extend
+            ack_deadlines.
+
+            This field will be honored on a best effort basis.
+
+            If this parameter is 0, a default value of 5 is used.
+    """
+
+    dead_letter_topic: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    max_delivery_attempts: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+
+
+class ExpirationPolicy(proto.Message):
+    r"""A policy that specifies the conditions for resource
+    expiration (i.e., automatic resource deletion).
+
+    Attributes:
+        ttl (google.protobuf.duration_pb2.Duration):
+            Optional. Specifies the "time-to-live" duration for an
+            associated resource. The resource expires if it is not
+            active for a period of ``ttl``. The definition of "activity"
+            depends on the type of the associated resource. The minimum
+            and maximum allowed values for ``ttl`` depend on the type of
+            the associated resource, as well. If ``ttl`` is not set, the
+            associated resource never expires.
+    """
+
+    ttl: duration_pb2.Duration = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=duration_pb2.Duration,
+    )
+
+
+class PushConfig(proto.Message):
+    r"""Configuration for a push delivery endpoint.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        push_endpoint (str):
+            Optional. A URL locating the endpoint to which messages
+            should be pushed. For example, a Webhook endpoint might use
+            ``https://example.com/push``.
+        attributes (MutableMapping[str, str]):
+            Optional. Endpoint configuration attributes that can be used
+            to control different aspects of the message delivery.
+
+            The only currently supported attribute is
+            ``x-goog-version``, which you can use to change the format
+            of the pushed message.
This attribute indicates the version
+            of the data expected by the endpoint. This controls the
+            shape of the pushed message (i.e., its fields and metadata).
+
+            If not present during the ``CreateSubscription`` call, it
+            will default to the version of the Pub/Sub API used to make
+            such call. If not present in a ``ModifyPushConfig`` call,
+            its value will not be changed. ``GetSubscription`` calls
+            will always return a valid version, even if the subscription
+            was created without this attribute.
+
+            The only supported values for the ``x-goog-version``
+            attribute are:
+
+            - ``v1beta1``: uses the push format defined in the v1beta1
+              Pub/Sub API.
+            - ``v1`` or ``v1beta2``: uses the push format defined in the
+              v1 Pub/Sub API.
+
+            For example: ``attributes { "x-goog-version": "v1" }``
+        oidc_token (google.pubsub_v1.types.PushConfig.OidcToken):
+            Optional. If specified, Pub/Sub will generate and attach an
+            OIDC JWT token as an ``Authorization`` header in the HTTP
+            request for every pushed message.
+
+            This field is a member of `oneof`_ ``authentication_method``.
+        pubsub_wrapper (google.pubsub_v1.types.PushConfig.PubsubWrapper):
+            Optional. When set, the payload to the push
+            endpoint is in the form of the JSON
+            representation of a PubsubMessage
+            (https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#pubsubmessage).
+
+            This field is a member of `oneof`_ ``wrapper``.
+        no_wrapper (google.pubsub_v1.types.PushConfig.NoWrapper):
+            Optional. When set, the payload to the push
+            endpoint is not wrapped.
+
+            This field is a member of `oneof`_ ``wrapper``.
+    """
+
+    class OidcToken(proto.Message):
+        r"""Contains information needed for generating an `OpenID Connect
+        token <https://developers.google.com/identity/protocols/OpenIDConnect>`__.
+
+        Attributes:
+            service_account_email (str):
+                Optional. `Service account
+                email <https://cloud.google.com/iam/docs/service-accounts>`__
+                used for generating the OIDC token. For more information on
+                setting up authentication, see `Push
+                subscriptions <https://cloud.google.com/pubsub/docs/push>`__.
+            audience (str):
+                Optional. Audience to be used when generating
+                OIDC token. The audience claim identifies the
+                recipients that the JWT is intended for. The
+                audience value is a single case-sensitive
+                string. Having multiple values (array) for the
+                audience field is not supported. More info about
+                the OIDC JWT token audience here:
+
+                https://tools.ietf.org/html/rfc7519#section-4.1.3
+                Note: if not specified, the Push endpoint URL
+                will be used.
+        """
+
+        service_account_email: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        audience: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+
+    class PubsubWrapper(proto.Message):
+        r"""The payload to the push endpoint is in the form of the JSON
+        representation of a PubsubMessage
+        (https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#pubsubmessage).
+
+        """
+
+    class NoWrapper(proto.Message):
+        r"""Sets the ``data`` field as the HTTP body for delivery.
+
+        Attributes:
+            write_metadata (bool):
+                Optional. When true, writes the Pub/Sub message metadata to
+                ``x-goog-pubsub-<KEY>:<VAL>`` headers of the HTTP request.
+                Writes the Pub/Sub message attributes to ``<KEY>:<VAL>``
+                headers of the HTTP request.
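Aside, not part of the generated diff: a short sketch of how the PushConfig oneofs compose in practice with the released client. Setting ``oidc_token`` (the ``authentication_method`` oneof) together with ``no_wrapper`` (the ``wrapper`` oneof) is valid because they belong to different oneofs. The endpoint, project, and account names are hypothetical.

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path("my-project", "my-push-sub")  # hypothetical

push_config = pubsub_v1.types.PushConfig(
    push_endpoint="https://example.com/push",
    oidc_token=pubsub_v1.types.PushConfig.OidcToken(
        service_account_email="push-auth@my-project.iam.gserviceaccount.com",  # hypothetical
    ),
    # Deliver the raw ``data`` bytes plus metadata headers instead of the
    # JSON-wrapped PubsubMessage.
    no_wrapper=pubsub_v1.types.PushConfig.NoWrapper(write_metadata=True),
)

subscriber.modify_push_config(
    request={"subscription": subscription_path, "push_config": push_config}
)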
+ """ + + write_metadata: bool = proto.Field( + proto.BOOL, + number=1, + ) + + push_endpoint: str = proto.Field( + proto.STRING, + number=1, + ) + attributes: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + oidc_token: OidcToken = proto.Field( + proto.MESSAGE, + number=3, + oneof="authentication_method", + message=OidcToken, + ) + pubsub_wrapper: PubsubWrapper = proto.Field( + proto.MESSAGE, + number=4, + oneof="wrapper", + message=PubsubWrapper, + ) + no_wrapper: NoWrapper = proto.Field( + proto.MESSAGE, + number=5, + oneof="wrapper", + message=NoWrapper, + ) + + +class BigQueryConfig(proto.Message): + r"""Configuration for a BigQuery subscription. + + Attributes: + table (str): + Optional. The name of the table to which to + write data, of the form + {projectId}.{datasetId}.{tableId} + use_topic_schema (bool): + Optional. When true, use the topic's schema as the columns + to write to in BigQuery, if it exists. ``use_topic_schema`` + and ``use_table_schema`` cannot be enabled at the same time. + write_metadata (bool): + Optional. When true, write the subscription name, + message_id, publish_time, attributes, and ordering_key to + additional columns in the table. The subscription name, + message_id, and publish_time fields are put in their own + columns while all other message properties (other than data) + are written to a JSON object in the attributes column. + drop_unknown_fields (bool): + Optional. When true and use_topic_schema is true, any fields + that are a part of the topic schema that are not part of the + BigQuery table schema are dropped when writing to BigQuery. + Otherwise, the schemas must be kept in sync and any messages + with extra fields are not written and remain in the + subscription's backlog. + state (google.pubsub_v1.types.BigQueryConfig.State): + Output only. An output-only field that + indicates whether or not the subscription can + receive messages. + use_table_schema (bool): + Optional. When true, use the BigQuery table's schema as the + columns to write to in BigQuery. ``use_table_schema`` and + ``use_topic_schema`` cannot be enabled at the same time. + service_account_email (str): + Optional. The service account to use to write to BigQuery. + The subscription creator or updater that specifies this + field must have ``iam.serviceAccounts.actAs`` permission on + the service account. If not specified, the Pub/Sub `service + agent `__, + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, + is used. + """ + + class State(proto.Enum): + r"""Possible states for a BigQuery subscription. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + The subscription can actively send messages + to BigQuery + PERMISSION_DENIED (2): + Cannot write to the BigQuery table because of permission + denied errors. This can happen if + + - Pub/Sub SA has not been granted the `appropriate BigQuery + IAM + permissions `__ + - bigquery.googleapis.com API is not enabled for the project + (`instructions `__) + NOT_FOUND (3): + Cannot write to the BigQuery table because it + does not exist. + SCHEMA_MISMATCH (4): + Cannot write to the BigQuery table due to a + schema mismatch. + IN_TRANSIT_LOCATION_RESTRICTION (5): + Cannot write to the destination because enforce_in_transit + is set to true and the destination locations are not in the + allowed regions. 
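Aside, not part of the generated diff: a minimal sketch of wiring a BigQueryConfig into ``create_subscription`` with the released client. Per the State values above, the table must already exist and the Pub/Sub service agent needs write access; all names are hypothetical.

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
topic_path = subscriber.topic_path("my-project", "my-topic")  # hypothetical
subscription_path = subscriber.subscription_path("my-project", "my-bq-sub")

bigquery_config = pubsub_v1.types.BigQueryConfig(
    table="my-project.my_dataset.my_table",  # {projectId}.{datasetId}.{tableId}
    write_metadata=True,  # also write message_id, publish_time, attributes, ordering_key
)

subscriber.create_subscription(
    request={
        "name": subscription_path,
        "topic": topic_path,
        "bigquery_config": bigquery_config,
    }
)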
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PERMISSION_DENIED = 2 + NOT_FOUND = 3 + SCHEMA_MISMATCH = 4 + IN_TRANSIT_LOCATION_RESTRICTION = 5 + + table: str = proto.Field( + proto.STRING, + number=1, + ) + use_topic_schema: bool = proto.Field( + proto.BOOL, + number=2, + ) + write_metadata: bool = proto.Field( + proto.BOOL, + number=3, + ) + drop_unknown_fields: bool = proto.Field( + proto.BOOL, + number=4, + ) + state: State = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + use_table_schema: bool = proto.Field( + proto.BOOL, + number=6, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=7, + ) + + +class CloudStorageConfig(proto.Message): + r"""Configuration for a Cloud Storage subscription. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bucket (str): + Required. User-provided name for the Cloud Storage bucket. + The bucket must be created by the user. The bucket name must + be without any prefix like "gs://". See the [bucket naming + requirements] + (https://cloud.google.com/storage/docs/buckets#naming). + filename_prefix (str): + Optional. User-provided prefix for Cloud Storage filename. + See the `object naming + requirements `__. + filename_suffix (str): + Optional. User-provided suffix for Cloud Storage filename. + See the `object naming + requirements `__. + Must not end in "/". + filename_datetime_format (str): + Optional. User-provided format string specifying how to + represent datetimes in Cloud Storage filenames. See the + `datetime format + guidance `__. + text_config (google.pubsub_v1.types.CloudStorageConfig.TextConfig): + Optional. If set, message data will be + written to Cloud Storage in text format. + + This field is a member of `oneof`_ ``output_format``. + avro_config (google.pubsub_v1.types.CloudStorageConfig.AvroConfig): + Optional. If set, message data will be + written to Cloud Storage in Avro format. + + This field is a member of `oneof`_ ``output_format``. + max_duration (google.protobuf.duration_pb2.Duration): + Optional. The maximum duration that can + elapse before a new Cloud Storage file is + created. Min 1 minute, max 10 minutes, default 5 + minutes. May not exceed the subscription's + acknowledgment deadline. + max_bytes (int): + Optional. The maximum bytes that can be written to a Cloud + Storage file before a new file is created. Min 1 KB, max 10 + GiB. The max_bytes limit may be exceeded in cases where + messages are larger than the limit. + max_messages (int): + Optional. The maximum number of messages that + can be written to a Cloud Storage file before a + new file is created. Min 1000 messages. + state (google.pubsub_v1.types.CloudStorageConfig.State): + Output only. An output-only field that + indicates whether or not the subscription can + receive messages. + service_account_email (str): + Optional. The service account to use to write to Cloud + Storage. The subscription creator or updater that specifies + this field must have ``iam.serviceAccounts.actAs`` + permission on the service account. If not specified, the + Pub/Sub `service + agent `__, + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, + is used. + """ + + class State(proto.Enum): + r"""Possible states for a Cloud Storage subscription. 
+ + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + The subscription can actively send messages + to Cloud Storage. + PERMISSION_DENIED (2): + Cannot write to the Cloud Storage bucket + because of permission denied errors. + NOT_FOUND (3): + Cannot write to the Cloud Storage bucket + because it does not exist. + IN_TRANSIT_LOCATION_RESTRICTION (4): + Cannot write to the destination because enforce_in_transit + is set to true and the destination locations are not in the + allowed regions. + SCHEMA_MISMATCH (5): + Cannot write to the Cloud Storage bucket due + to an incompatibility between the topic schema + and subscription settings. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PERMISSION_DENIED = 2 + NOT_FOUND = 3 + IN_TRANSIT_LOCATION_RESTRICTION = 4 + SCHEMA_MISMATCH = 5 + + class TextConfig(proto.Message): + r"""Configuration for writing message data in text format. + Message payloads will be written to files as raw text, separated + by a newline. + + """ + + class AvroConfig(proto.Message): + r"""Configuration for writing message data in Avro format. + Message payloads and metadata will be written to files as an + Avro binary. + + Attributes: + write_metadata (bool): + Optional. When true, write the subscription name, + message_id, publish_time, attributes, and ordering_key as + additional fields in the output. The subscription name, + message_id, and publish_time fields are put in their own + fields while all other message properties other than data + (for example, an ordering_key, if present) are added as + entries in the attributes map. + use_topic_schema (bool): + Optional. When true, the output Cloud Storage + file will be serialized using the topic schema, + if it exists. + """ + + write_metadata: bool = proto.Field( + proto.BOOL, + number=1, + ) + use_topic_schema: bool = proto.Field( + proto.BOOL, + number=2, + ) + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + filename_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + filename_suffix: str = proto.Field( + proto.STRING, + number=3, + ) + filename_datetime_format: str = proto.Field( + proto.STRING, + number=10, + ) + text_config: TextConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof="output_format", + message=TextConfig, + ) + avro_config: AvroConfig = proto.Field( + proto.MESSAGE, + number=5, + oneof="output_format", + message=AvroConfig, + ) + max_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + max_bytes: int = proto.Field( + proto.INT64, + number=7, + ) + max_messages: int = proto.Field( + proto.INT64, + number=8, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=11, + ) + + +class ReceivedMessage(proto.Message): + r"""A message and its corresponding acknowledgment ID. + + Attributes: + ack_id (str): + Optional. This ID can be used to acknowledge + the received message. + message (google.pubsub_v1.types.PubsubMessage): + Optional. The message. + delivery_attempt (int): + Optional. The approximate number of times that Pub/Sub has + attempted to deliver the associated message to a subscriber. + + More precisely, this is 1 + (number of NACKs) + (number of + ack_deadline exceeds) for this message. + + A NACK is any call to ModifyAckDeadline with a 0 deadline. + An ack_deadline exceeds event is whenever a message is not + acknowledged within ack_deadline. 
Note that ack_deadline is + initially Subscription.ackDeadlineSeconds, but may get + extended automatically by the client library. + + Upon the first delivery of a given message, + ``delivery_attempt`` will have a value of 1. The value is + calculated at best effort and is approximate. + + If a DeadLetterPolicy is not set on the subscription, this + will be 0. + """ + + ack_id: str = proto.Field( + proto.STRING, + number=1, + ) + message: "PubsubMessage" = proto.Field( + proto.MESSAGE, + number=2, + message="PubsubMessage", + ) + delivery_attempt: int = proto.Field( + proto.INT32, + number=3, + ) + + +class GetSubscriptionRequest(proto.Message): + r"""Request for the GetSubscription method. + + Attributes: + subscription (str): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSubscriptionRequest(proto.Message): + r"""Request for the UpdateSubscription method. + + Attributes: + subscription (google.pubsub_v1.types.Subscription): + Required. The updated subscription object. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the + provided subscription to update. Must be + specified and non-empty. + """ + + subscription: "Subscription" = proto.Field( + proto.MESSAGE, + number=1, + message="Subscription", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListSubscriptionsRequest(proto.Message): + r"""Request for the ``ListSubscriptions`` method. + + Attributes: + project (str): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + page_size (int): + Optional. Maximum number of subscriptions to + return. + page_token (str): + Optional. The value returned by the last + ``ListSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListSubscriptions`` call, and that + the system should return the next page of data. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSubscriptionsResponse(proto.Message): + r"""Response for the ``ListSubscriptions`` method. + + Attributes: + subscriptions (MutableSequence[google.pubsub_v1.types.Subscription]): + Optional. The subscriptions that match the + request. + next_page_token (str): + Optional. If not empty, indicates that there may be more + subscriptions that match the request; this value should be + passed in a new ``ListSubscriptionsRequest`` to get more + subscriptions. + """ + + @property + def raw_page(self): + return self + + subscriptions: MutableSequence["Subscription"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Subscription", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSubscriptionRequest(proto.Message): + r"""Request for the DeleteSubscription method. + + Attributes: + subscription (str): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ModifyPushConfigRequest(proto.Message): + r"""Request for the ModifyPushConfig method. + + Attributes: + subscription (str): + Required. The name of the subscription. 
Format is
+            ``projects/{project}/subscriptions/{sub}``.
+        push_config (google.pubsub_v1.types.PushConfig):
+            Required. The push configuration for future deliveries.
+
+            An empty ``pushConfig`` indicates that the Pub/Sub system
+            should stop pushing messages from the given subscription and
+            allow messages to be pulled and acknowledged - effectively
+            pausing the subscription if ``Pull`` or ``StreamingPull`` is
+            not called.
+    """
+
+    subscription: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    push_config: "PushConfig" = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message="PushConfig",
+    )
+
+
+class PullRequest(proto.Message):
+    r"""Request for the ``Pull`` method.
+
+    Attributes:
+        subscription (str):
+            Required. The subscription from which messages should be
+            pulled. Format is
+            ``projects/{project}/subscriptions/{sub}``.
+        return_immediately (bool):
+            Optional. If this field is set to true, the system will
+            respond immediately even if there are no messages available
+            to return in the ``Pull`` response. Otherwise, the system
+            may wait (for a bounded amount of time) until at least one
+            message is available, rather than returning no messages.
+            Warning: setting this field to ``true`` is discouraged
+            because it adversely impacts the performance of ``Pull``
+            operations. We recommend that users do not set this field.
+        max_messages (int):
+            Required. The maximum number of messages to
+            return for this request. Must be a positive
+            integer. The Pub/Sub system may return fewer
+            than the number specified.
+    """
+
+    subscription: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    return_immediately: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    max_messages: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+
+
+class PullResponse(proto.Message):
+    r"""Response for the ``Pull`` method.
+
+    Attributes:
+        received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]):
+            Optional. Received Pub/Sub messages. The list will be empty
+            if there are no more messages available in the backlog, or
+            if no messages could be returned before the request timeout.
+            For JSON, the response can be entirely empty. The Pub/Sub
+            system may return fewer than the ``maxMessages`` requested
+            even if there are more messages available in the backlog.
+    """
+
+    received_messages: MutableSequence["ReceivedMessage"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message="ReceivedMessage",
+    )
+
+
+class ModifyAckDeadlineRequest(proto.Message):
+    r"""Request for the ModifyAckDeadline method.
+
+    Attributes:
+        subscription (str):
+            Required. The name of the subscription. Format is
+            ``projects/{project}/subscriptions/{sub}``.
+        ack_ids (MutableSequence[str]):
+            Required. List of acknowledgment IDs.
+        ack_deadline_seconds (int):
+            Required. The new ack deadline with respect to the time this
+            request was sent to the Pub/Sub system. For example, if the
+            value is 10, the new ack deadline will expire 10 seconds
+            after the ``ModifyAckDeadline`` call was made. Specifying
+            zero might immediately make the message available for
+            delivery to another subscriber client. This typically
+            results in an increase in the rate of message redeliveries
+            (that is, duplicates). The minimum deadline you can specify
+            is 0 seconds. The maximum deadline you can specify in a
+            single request is 600 seconds (10 minutes).
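Aside, not part of the generated diff: the Pull, ModifyAckDeadline, and Acknowledge requests above combine into the usual synchronous-pull loop. A minimal sketch with the released client; names are hypothetical, and ``return_immediately`` is left unset (False) as the docstring recommends.

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path("my-project", "my-sub")  # hypothetical

response = subscriber.pull(
    request={"subscription": subscription_path, "max_messages": 10}
)

ack_ids = [received.ack_id for received in response.received_messages]
if ack_ids:
    # Buy more processing time, then acknowledge.
    subscriber.modify_ack_deadline(
        request={
            "subscription": subscription_path,
            "ack_ids": ack_ids,
            "ack_deadline_seconds": 60,
        }
    )
    subscriber.acknowledge(
        request={"subscription": subscription_path, "ack_ids": ack_ids}
    )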
+ """ + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + ack_deadline_seconds: int = proto.Field( + proto.INT32, + number=3, + ) + + +class AcknowledgeRequest(proto.Message): + r"""Request for the Acknowledge method. + + Attributes: + subscription (str): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (MutableSequence[str]): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in the + ``Pull`` response. Must not be empty. + """ + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class StreamingPullRequest(proto.Message): + r"""Request for the ``StreamingPull`` streaming RPC method. This request + is used to establish the initial stream as well as to stream + acknowledgments and ack deadline modifications from the client to + the server. + + Attributes: + subscription (str): + Required. The subscription for which to initialize the new + stream. This must be provided in the first request on the + stream, and must not be set in subsequent requests from + client to server. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (MutableSequence[str]): + Optional. List of acknowledgment IDs for acknowledging + previously received messages (received on this stream or a + different stream). If an ack ID has expired, the + corresponding message may be redelivered later. + Acknowledging a message more than once will not result in an + error. If the acknowledgment ID is malformed, the stream + will be aborted with status ``INVALID_ARGUMENT``. + modify_deadline_seconds (MutableSequence[int]): + Optional. The list of new ack deadlines for the IDs listed + in ``modify_deadline_ack_ids``. The size of this list must + be the same as the size of ``modify_deadline_ack_ids``. If + it differs the stream will be aborted with + ``INVALID_ARGUMENT``. Each element in this list is applied + to the element in the same position in + ``modify_deadline_ack_ids``. The new ack deadline is with + respect to the time this request was sent to the Pub/Sub + system. Must be >= 0. For example, if the value is 10, the + new ack deadline will expire 10 seconds after this request + is received. If the value is 0, the message is immediately + made available for another streaming or non-streaming pull + request. If the value is < 0 (an error), the stream will be + aborted with status ``INVALID_ARGUMENT``. + modify_deadline_ack_ids (MutableSequence[str]): + Optional. List of acknowledgment IDs whose deadline will be + modified based on the corresponding element in + ``modify_deadline_seconds``. This field can be used to + indicate that more time is needed to process a message by + the subscriber, or to make the message available for + redelivery if the processing was interrupted. + stream_ack_deadline_seconds (int): + Required. The ack deadline to use for the + stream. This must be provided in the first + request on the stream, but it can also be + updated on subsequent requests from client to + server. The minimum deadline you can specify is + 10 seconds. The maximum deadline you can specify + is 600 seconds (10 minutes). + client_id (str): + Optional. A unique identifier that is used to distinguish + client instances from each other. 
Only needs to be provided + on the initial request. When a stream disconnects and + reconnects for the same stream, the client_id should be set + to the same value so that state associated with the old + stream can be transferred to the new stream. The same + client_id should not be used for different client instances. + max_outstanding_messages (int): + Optional. Flow control settings for the maximum number of + outstanding messages. When there are + ``max_outstanding_messages`` currently sent to the streaming + pull client that have not yet been acked or nacked, the + server stops sending more messages. The sending of messages + resumes once the number of outstanding messages is less than + this value. If the value is <= 0, there is no limit to the + number of outstanding messages. This property can only be + set on the initial StreamingPullRequest. If it is set on a + subsequent request, the stream will be aborted with status + ``INVALID_ARGUMENT``. + max_outstanding_bytes (int): + Optional. Flow control settings for the maximum number of + outstanding bytes. When there are ``max_outstanding_bytes`` + or more worth of messages currently sent to the streaming + pull client that have not yet been acked or nacked, the + server will stop sending more messages. The sending of + messages resumes once the number of outstanding bytes is + less than this value. If the value is <= 0, there is no + limit to the number of outstanding bytes. This property can + only be set on the initial StreamingPullRequest. If it is + set on a subsequent request, the stream will be aborted with + status ``INVALID_ARGUMENT``. + protocol_version (int): + Optional. The protocol version used by the client. This + property can only be set on the initial + StreamingPullRequest. If it is set on a subsequent request, + the stream will be aborted with status ``INVALID_ARGUMENT``. + """ + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + modify_deadline_seconds: MutableSequence[int] = proto.RepeatedField( + proto.INT32, + number=3, + ) + modify_deadline_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + stream_ack_deadline_seconds: int = proto.Field( + proto.INT32, + number=5, + ) + client_id: str = proto.Field( + proto.STRING, + number=6, + ) + max_outstanding_messages: int = proto.Field( + proto.INT64, + number=7, + ) + max_outstanding_bytes: int = proto.Field( + proto.INT64, + number=8, + ) + protocol_version: int = proto.Field( + proto.INT64, + number=10, + ) + + +class StreamingPullResponse(proto.Message): + r"""Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. + + Attributes: + received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]): + Optional. Received Pub/Sub messages. + acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): + Optional. This field will only be set if + ``enable_exactly_once_delivery`` is set to ``true`` and is + not guaranteed to be populated. + modify_ack_deadline_confirmation (google.pubsub_v1.types.StreamingPullResponse.ModifyAckDeadlineConfirmation): + Optional. This field will only be set if + ``enable_exactly_once_delivery`` is set to ``true`` and is + not guaranteed to be populated. + subscription_properties (google.pubsub_v1.types.StreamingPullResponse.SubscriptionProperties): + Optional. 
Properties associated with this + subscription. + """ + + class AcknowledgeConfirmation(proto.Message): + r"""Acknowledgment IDs sent in one or more previous requests to + acknowledge a previously received message. + + Attributes: + ack_ids (MutableSequence[str]): + Optional. Successfully processed + acknowledgment IDs. + invalid_ack_ids (MutableSequence[str]): + Optional. List of acknowledgment IDs that + were malformed or whose acknowledgment deadline + has expired. + unordered_ack_ids (MutableSequence[str]): + Optional. List of acknowledgment IDs that + were out of order. + temporary_failed_ack_ids (MutableSequence[str]): + Optional. List of acknowledgment IDs that + failed processing with temporary issues. + """ + + ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + invalid_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + unordered_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + temporary_failed_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + class ModifyAckDeadlineConfirmation(proto.Message): + r"""Acknowledgment IDs sent in one or more previous requests to + modify the deadline for a specific message. + + Attributes: + ack_ids (MutableSequence[str]): + Optional. Successfully processed + acknowledgment IDs. + invalid_ack_ids (MutableSequence[str]): + Optional. List of acknowledgment IDs that + were malformed or whose acknowledgment deadline + has expired. + temporary_failed_ack_ids (MutableSequence[str]): + Optional. List of acknowledgment IDs that + failed processing with temporary issues. + """ + + ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + invalid_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + temporary_failed_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + class SubscriptionProperties(proto.Message): + r"""Subscription properties sent as part of the response. + + Attributes: + exactly_once_delivery_enabled (bool): + Optional. True iff exactly once delivery is + enabled for this subscription. + message_ordering_enabled (bool): + Optional. True iff message ordering is + enabled for this subscription. + """ + + exactly_once_delivery_enabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + message_ordering_enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + + received_messages: MutableSequence["ReceivedMessage"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ReceivedMessage", + ) + acknowledge_confirmation: AcknowledgeConfirmation = proto.Field( + proto.MESSAGE, + number=5, + message=AcknowledgeConfirmation, + ) + modify_ack_deadline_confirmation: ModifyAckDeadlineConfirmation = proto.Field( + proto.MESSAGE, + number=3, + message=ModifyAckDeadlineConfirmation, + ) + subscription_properties: SubscriptionProperties = proto.Field( + proto.MESSAGE, + number=4, + message=SubscriptionProperties, + ) + + +class CreateSnapshotRequest(proto.Message): + r"""Request for the ``CreateSnapshot`` method. + + Attributes: + name (str): + Required. User-provided name for this snapshot. If the name + is not provided in the request, the server will assign a + random name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. See the `resource name + rules `__. + Format is ``projects/{project}/snapshots/{snap}``. 
+ subscription (str): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is guaranteed to + retain: (a) The existing backlog on the subscription. More + precisely, this is defined as the messages in the + subscription's backlog that are unacknowledged upon the + successful completion of the ``CreateSnapshot`` request; as + well as: (b) Any messages published to the subscription's + topic following the successful completion of the + CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. + labels (MutableMapping[str, str]): + Optional. See `Creating and managing + labels `__. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag + keys/values directly bound to this resource. For + example: + + "123/environment": "production", + "123/costCenter": "marketing". + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + subscription: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class UpdateSnapshotRequest(proto.Message): + r"""Request for the UpdateSnapshot method. + + Attributes: + snapshot (google.pubsub_v1.types.Snapshot): + Required. The updated snapshot object. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the + provided snapshot to update. Must be specified + and non-empty. + """ + + snapshot: "Snapshot" = proto.Field( + proto.MESSAGE, + number=1, + message="Snapshot", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class Snapshot(proto.Message): + r"""A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages in + an existing subscription to the state captured by a snapshot. + + Attributes: + name (str): + Optional. The name of the snapshot. + topic (str): + Optional. The name of the topic from which + this snapshot is retaining messages. + expire_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The snapshot is guaranteed to exist up until this + time. A newly-created snapshot expires no later than 7 days + from the time of its creation. Its exact lifetime is + determined at creation by the existing backlog in the source + subscription. Specifically, the lifetime of the snapshot is + ``7 days - (age of oldest unacked message in the subscription)``. + For example, consider a subscription whose oldest unacked + message is 3 days old. If a snapshot is created from this + subscription, the snapshot -- which will always capture this + 3-day-old backlog as long as the snapshot exists -- will + expire in 4 days. The service will refuse to create a + snapshot that would expire in less than 1 hour after + creation. + labels (MutableMapping[str, str]): + Optional. See [Creating and managing labels] + (https://cloud.google.com/pubsub/docs/labels). 
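Aside, not part of the generated diff: CreateSnapshot and Seek pair naturally for replay. A minimal sketch with the released client; all names are hypothetical.

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path("my-project", "my-sub")  # hypothetical
snapshot_path = subscriber.snapshot_path("my-project", "my-snapshot")

# Capture the subscription's current unacknowledged backlog.
subscriber.create_snapshot(
    request={"name": snapshot_path, "subscription": subscription_path}
)

# Later, rewind the subscription's acknowledgment state to the snapshot.
subscriber.seek(
    request={"subscription": subscription_path, "snapshot": snapshot_path}
)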
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + topic: str = proto.Field( + proto.STRING, + number=2, + ) + expire_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + + +class GetSnapshotRequest(proto.Message): + r"""Request for the GetSnapshot method. + + Attributes: + snapshot (str): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + """ + + snapshot: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListSnapshotsRequest(proto.Message): + r"""Request for the ``ListSnapshots`` method. + + Attributes: + project (str): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + page_size (int): + Optional. Maximum number of snapshots to + return. + page_token (str): + Optional. The value returned by the last + ``ListSnapshotsResponse``; indicates that this is a + continuation of a prior ``ListSnapshots`` call, and that the + system should return the next page of data. + """ + + project: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSnapshotsResponse(proto.Message): + r"""Response for the ``ListSnapshots`` method. + + Attributes: + snapshots (MutableSequence[google.pubsub_v1.types.Snapshot]): + Optional. The resulting snapshots. + next_page_token (str): + Optional. If not empty, indicates that there may be more + snapshot that match the request; this value should be passed + in a new ``ListSnapshotsRequest``. + """ + + @property + def raw_page(self): + return self + + snapshots: MutableSequence["Snapshot"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Snapshot", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSnapshotRequest(proto.Message): + r"""Request for the ``DeleteSnapshot`` method. + + Attributes: + snapshot (str): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + """ + + snapshot: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SeekRequest(proto.Message): + r"""Request for the ``Seek`` method. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + subscription (str): + Required. The subscription to affect. + time (google.protobuf.timestamp_pb2.Timestamp): + Optional. The time to seek to. Messages retained in the + subscription that were published before this time are marked + as acknowledged, and messages retained in the subscription + that were published after this time are marked as + unacknowledged. Note that this operation affects only those + messages retained in the subscription (configured by the + combination of ``message_retention_duration`` and + ``retain_acked_messages``). 
For example, if ``time`` + corresponds to a point before the message retention window + (or to a point before the system's notion of the + subscription creation time), only retained messages will be + marked as unacknowledged, and already-expunged messages will + not be restored. + + This field is a member of `oneof`_ ``target``. + snapshot (str): + Optional. The snapshot to seek to. The snapshot's topic must + be the same as that of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. + + This field is a member of `oneof`_ ``target``. + """ + + subscription: str = proto.Field( + proto.STRING, + number=1, + ) + time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + oneof="target", + message=timestamp_pb2.Timestamp, + ) + snapshot: str = proto.Field( + proto.STRING, + number=3, + oneof="target", + ) + + +class SeekResponse(proto.Message): + r"""Response for the ``Seek`` method (this response is empty).""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/pubsub_v1/types/schema.py b/google/pubsub_v1/types/schema.py new file mode 100644 index 000000000..e1f376ed9 --- /dev/null +++ b/google/pubsub_v1/types/schema.py @@ -0,0 +1,511 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package="google.pubsub.v1", + manifest={ + "SchemaView", + "Encoding", + "Schema", + "CreateSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", + "CommitSchemaRequest", + "RollbackSchemaRequest", + "DeleteSchemaRevisionRequest", + "DeleteSchemaRequest", + "ValidateSchemaRequest", + "ValidateSchemaResponse", + "ValidateMessageRequest", + "ValidateMessageResponse", + }, +) + + +class SchemaView(proto.Enum): + r"""View of Schema object fields to be returned by GetSchema and + ListSchemas. + + Values: + SCHEMA_VIEW_UNSPECIFIED (0): + The default / unset value. + The API will default to the BASIC view. + BASIC (1): + Include the name and type of the schema, but + not the definition. + FULL (2): + Include all Schema object fields. + """ + SCHEMA_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class Encoding(proto.Enum): + r"""Possible encoding types for messages. + + Values: + ENCODING_UNSPECIFIED (0): + Unspecified + JSON (1): + JSON encoding + BINARY (2): + Binary encoding, as defined by the schema + type. For some schema types, binary encoding may + not be available. + """ + ENCODING_UNSPECIFIED = 0 + JSON = 1 + BINARY = 2 + + +class Schema(proto.Message): + r"""A schema resource. + + Attributes: + name (str): + Required. Name of the schema. Format is + ``projects/{project}/schemas/{schema}``. + type_ (google.pubsub_v1.types.Schema.Type): + The type of the schema definition. 
+ definition (str): + The definition of the schema. This should contain a string + representing the full definition of the schema that is a + valid schema definition of the type specified in ``type``. + revision_id (str): + Output only. Immutable. The revision ID of + the schema. + revision_create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp that the revision + was created. + """ + + class Type(proto.Enum): + r"""Possible schema definition types. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + PROTOCOL_BUFFER (1): + A Protocol Buffer schema definition. + AVRO (2): + An Avro schema definition. + """ + TYPE_UNSPECIFIED = 0 + PROTOCOL_BUFFER = 1 + AVRO = 2 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: Type = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + definition: str = proto.Field( + proto.STRING, + number=3, + ) + revision_id: str = proto.Field( + proto.STRING, + number=4, + ) + revision_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + + +class CreateSchemaRequest(proto.Message): + r"""Request for the CreateSchema method. + + Attributes: + parent (str): + Required. The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + schema (google.pubsub_v1.types.Schema): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. The schema + object returned by CreateSchema will have a ``name`` made + using the given ``parent`` and ``schema_id``. + schema_id (str): + The ID to use for the schema, which will become the final + component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names + for resource name constraints. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + schema: "Schema" = proto.Field( + proto.MESSAGE, + number=2, + message="Schema", + ) + schema_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetSchemaRequest(proto.Message): + r"""Request for the GetSchema method. + + Attributes: + name (str): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. + view (google.pubsub_v1.types.SchemaView): + The set of fields to return in the response. If not set, + returns a Schema with all fields filled out. Set to + ``BASIC`` to omit the ``definition``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "SchemaView" = proto.Field( + proto.ENUM, + number=2, + enum="SchemaView", + ) + + +class ListSchemasRequest(proto.Message): + r"""Request for the ``ListSchemas`` method. + + Attributes: + parent (str): + Required. The name of the project in which to list schemas. + Format is ``projects/{project-id}``. + view (google.pubsub_v1.types.SchemaView): + The set of Schema fields to return in the response. If not + set, returns Schemas with ``name`` and ``type``, but not + ``definition``. Set to ``FULL`` to retrieve all fields. + page_size (int): + Maximum number of schemas to return. + page_token (str): + The value returned by the last ``ListSchemasResponse``; + indicates that this is a continuation of a prior + ``ListSchemas`` call, and that the system should return the + next page of data. 
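Aside, not part of the generated diff: a minimal sketch of CreateSchemaRequest in use via the released SchemaServiceClient. The project, schema ID, and Avro definition are hypothetical.

from google.pubsub_v1 import Schema, SchemaServiceClient

schema_client = SchemaServiceClient()

avro_definition = (
    '{"type": "record", "name": "Example",'
    ' "fields": [{"name": "message", "type": "string"}]}'
)

schema = schema_client.create_schema(
    request={
        "parent": "projects/my-project",  # hypothetical
        "schema": Schema(type_=Schema.Type.AVRO, definition=avro_definition),
        "schema_id": "my-schema",
    }
)
print(schema.name)  # projects/my-project/schemas/my-schema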
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + view: "SchemaView" = proto.Field( + proto.ENUM, + number=2, + enum="SchemaView", + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSchemasResponse(proto.Message): + r"""Response for the ``ListSchemas`` method. + + Attributes: + schemas (MutableSequence[google.pubsub_v1.types.Schema]): + The resulting schemas. + next_page_token (str): + If not empty, indicates that there may be more schemas that + match the request; this value should be passed in a new + ``ListSchemasRequest``. + """ + + @property + def raw_page(self): + return self + + schemas: MutableSequence["Schema"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Schema", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ListSchemaRevisionsRequest(proto.Message): + r"""Request for the ``ListSchemaRevisions`` method. + + Attributes: + name (str): + Required. The name of the schema to list + revisions for. + view (google.pubsub_v1.types.SchemaView): + The set of Schema fields to return in the response. If not + set, returns Schemas with ``name`` and ``type``, but not + ``definition``. Set to ``FULL`` to retrieve all fields. + page_size (int): + The maximum number of revisions to return per + page. + page_token (str): + The page token, received from a previous + ListSchemaRevisions call. Provide this to + retrieve the subsequent page. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "SchemaView" = proto.Field( + proto.ENUM, + number=2, + enum="SchemaView", + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSchemaRevisionsResponse(proto.Message): + r"""Response for the ``ListSchemaRevisions`` method. + + Attributes: + schemas (MutableSequence[google.pubsub_v1.types.Schema]): + The revisions of the schema. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is empty, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + schemas: MutableSequence["Schema"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Schema", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CommitSchemaRequest(proto.Message): + r"""Request for CommitSchema method. + + Attributes: + name (str): + Required. The name of the schema we are revising. Format is + ``projects/{project}/schemas/{schema}``. + schema (google.pubsub_v1.types.Schema): + Required. The schema revision to commit. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + schema: "Schema" = proto.Field( + proto.MESSAGE, + number=2, + message="Schema", + ) + + +class RollbackSchemaRequest(proto.Message): + r"""Request for the ``RollbackSchema`` method. + + Attributes: + name (str): + Required. The schema being rolled back with + revision id. + revision_id (str): + Required. The revision ID to roll back to. + It must be a revision of the same schema. + + Example: c7cfa2a8 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + revision_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSchemaRevisionRequest(proto.Message): + r"""Request for the ``DeleteSchemaRevision`` method. + + Attributes: + name (str): + Required. 
The name of the schema revision to be deleted, + with a revision ID explicitly included. + + Example: ``projects/123/schemas/my-schema@c7cfa2a8`` + revision_id (str): + Optional. This field is deprecated and should not be used + for specifying the revision ID. The revision ID should be + specified via the ``name`` parameter. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + revision_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSchemaRequest(proto.Message): + r"""Request for the ``DeleteSchema`` method. + + Attributes: + name (str): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ValidateSchemaRequest(proto.Message): + r"""Request for the ``ValidateSchema`` method. + + Attributes: + parent (str): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + schema (google.pubsub_v1.types.Schema): + Required. The schema object to validate. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + schema: "Schema" = proto.Field( + proto.MESSAGE, + number=2, + message="Schema", + ) + + +class ValidateSchemaResponse(proto.Message): + r"""Response for the ``ValidateSchema`` method. Empty for now.""" + + +class ValidateMessageRequest(proto.Message): + r"""Request for the ``ValidateMessage`` method. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + parent (str): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + name (str): + Name of the schema against which to validate. + + Format is ``projects/{project}/schemas/{schema}``. + + This field is a member of `oneof`_ ``schema_spec``. + schema (google.pubsub_v1.types.Schema): + Ad-hoc schema against which to validate + + This field is a member of `oneof`_ ``schema_spec``. + message (bytes): + Message to validate against the provided ``schema_spec``. + encoding (google.pubsub_v1.types.Encoding): + The encoding expected for messages + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + name: str = proto.Field( + proto.STRING, + number=2, + oneof="schema_spec", + ) + schema: "Schema" = proto.Field( + proto.MESSAGE, + number=3, + oneof="schema_spec", + message="Schema", + ) + message: bytes = proto.Field( + proto.BYTES, + number=4, + ) + encoding: "Encoding" = proto.Field( + proto.ENUM, + number=5, + enum="Encoding", + ) + + +class ValidateMessageResponse(proto.Message): + r"""Response for the ``ValidateMessage`` method. Empty for now.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/librarian.py b/librarian.py new file mode 100644 index 000000000..5e6af7955 --- /dev/null +++ b/librarian.py @@ -0,0 +1,352 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+from pathlib import Path
+import re
+import shutil
+import textwrap
+
+import synthtool as s
+import synthtool.gcp as gcp
+from synthtool.languages import python
+
+# ----------------------------------------------------------------------------
+# Copy the generated client from the owl-bot staging directory
+# ----------------------------------------------------------------------------
+
+clean_up_generated_samples = True
+
+# Load the default version defined in .repo-metadata.json.
+default_version = json.load(open(".repo-metadata.json", "rt")).get(
+    "default_version"
+)
+
+for library in s.get_staging_dirs(default_version):
+    if clean_up_generated_samples:
+        shutil.rmtree("samples/generated_samples", ignore_errors=True)
+        clean_up_generated_samples = False
+
+    # DEFAULT SCOPES and SERVICE_ADDRESS are being used, so let's force them in.
+    s.replace(
+        library / f"google/pubsub_{library.name}/services/*er/*client.py",
+        r"""DEFAULT_ENDPOINT = \"pubsub\.googleapis\.com\"""",
+        """
+    # The scopes needed to make gRPC calls to all of the methods defined in
+    # this service
+    _DEFAULT_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+        'https://www.googleapis.com/auth/pubsub',
+    )
+
+    SERVICE_ADDRESS = "pubsub.googleapis.com:443"
+    \"""The default address of the service.\"""
+
+    \g<0>""",
+    )
+
+    # Modify GRPC options in transports.
+    count = s.replace(
+        [
+            library / f"google/pubsub_{library.name}/services/*/transports/grpc*",
+            library / f"tests/unit/gapic/pubsub_{library.name}/*",
+        ],
+        "options=\[.*?\]",
+        """options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+                ("grpc.max_metadata_size", 4 * 1024 * 1024),
+                ("grpc.keepalive_time_ms", 30000),
+            ]""",
+        flags=re.MULTILINE | re.DOTALL,
+    )
+
+    if count < 15:
+        raise Exception("Expected replacements for gRPC channel options not made.")
+
+    # If the emulator is used, force an insecure gRPC channel to avoid SSL errors.
+    clients_to_patch = [
+        library / f"google/pubsub_{library.name}/services/publisher/client.py",
+        library / f"google/pubsub_{library.name}/services/subscriber/client.py",
+        library / f"google/pubsub_{library.name}/services/schema_service/client.py",
+    ]
+    err_msg = (
+        "Expected replacements for gRPC channel to use with the emulator not made."
+    )
+
+    count = s.replace(clients_to_patch, r"import os", "import functools\n\g<0>")
+
+    if count < len(clients_to_patch):
+        raise Exception(err_msg)
+
+    count = s.replace(
+        clients_to_patch,
+        f"from \.transports\.base",
+        "\nimport grpc\n\g<0>",
+    )
+
+    if count < len(clients_to_patch):
+        raise Exception(err_msg)
+
+    # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator
+    # code below to test files.
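+    # The replacement below wires emulator support into each client
+    # constructor: when PUBSUB_EMULATOR_HOST is set, the transport is built on
+    # an insecure gRPC channel instead of the default secure one. Sketch of the
+    # intended end-user flow (host/port purely illustrative, assuming a locally
+    # running emulator):
+    #
+    #   $ export PUBSUB_EMULATOR_HOST=localhost:8085
+    #   $ python -c "from google import pubsub_v1; pubsub_v1.PublisherClient()"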
+    count = s.replace(
+        clients_to_patch,
+        r"# initialize with the provided callable or the passed in class",
+        """\g<0>
+
+            emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST")
+            if emulator_host:
+                if issubclass(transport_init, type(self)._transport_registry["grpc"]):  # type: ignore
+                    channel = grpc.insecure_channel(target=emulator_host)
+                else:
+                    channel = grpc.aio.insecure_channel(target=emulator_host)
+                transport_init = functools.partial(transport_init, channel=channel)
+
+        """,
+    )
+
+    if count < len(clients_to_patch):
+        raise Exception(err_msg)
+
+    # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream
+    # results.
+    s.replace(
+        library / f"google/pubsub_{library.name}/services/subscriber/client.py",
+        (
+            r"# Wrap the RPC method.*\n"
+            r"\s+# and friendly error.*\n"
+            r"\s+rpc = self\._transport\._wrapped_methods\[self\._transport\.streaming_pull\]"
+        ),
+        """
+        # Wrappers in api-core should not automatically pre-fetch the first
+        # stream result, as this breaks the stream when re-opening it.
+        # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257
+        self._transport.streaming_pull._prefetch_first_result_ = False
+
+        \g<0>""",
+    )
+
+    # Emit deprecation warning if return_immediately flag is set with synchronous pull.
+    s.replace(
+        library / f"google/pubsub_{library.name}/services/subscriber/*client.py",
+        r"from google.pubsub_v1 import gapic_version as package_version",
+        "import warnings\n\g<0>",
+    )
+
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/services/subscriber/*client.py",
+        r"""
+        ([^\n\S]+(?:async\ )?def\ pull\(.*?->\ pubsub\.PullResponse:.*?)
+        ((?P<indent>[^\n\S]+)\#\ Wrap\ the\ RPC\ method)
+        """,
+        textwrap.dedent(
+            """
+        \g<1>
+        \g<indent>if request.return_immediately:
+        \g<indent>    warnings.warn(
+        \g<indent>        "The return_immediately flag is deprecated and should be set to False.",
+        \g<indent>        category=DeprecationWarning,
+        \g<indent>    )
+
+        \g<2>"""
+        ),
+        flags=re.MULTILINE | re.DOTALL | re.VERBOSE,
+    )
+
+    if count != 2:
+        raise Exception("Too many or too few replacements in pull() methods.")
+
+    # Silence deprecation warnings in pull() method flattened parameter tests.
+    s.replace(
+        library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py",
+        "import os",
+        "\g<0>\nimport warnings",
+    )
+
+    count = s.replace(
+        library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py",
+        textwrap.dedent(
+            r"""
+            ([^\n\S]+# Call the method with a truthy value for each flattened field,
+            [^\n\S]+# using the keyword arguments to the method\.)
+            \s+(client\.pull\(.*?\))"""
+        ),
+        """\n\g<1>
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore", category=DeprecationWarning)
+            \g<2>""",
+        flags=re.MULTILINE | re.DOTALL,
+    )
+
+    if count < 1:
+        raise Exception("Catch warnings replacement failed.")
+
+    count = s.replace(
+        library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py",
+        textwrap.dedent(
+            r"""
+            ([^\n\S]+# Call the method with a truthy value for each flattened field,
+            [^\n\S]+# using the keyword arguments to the method\.)
+            \s+response = (await client\.pull\(.*?\))"""
+        ),
+        """\n\g<1>
+        with warnings.catch_warnings():
+            warnings.simplefilter("ignore", category=DeprecationWarning)
+            \g<2>""",
+        flags=re.MULTILINE | re.DOTALL,
+    )
+
+    if count < 1:
+        raise Exception("Catch warnings replacement failed.")
+
+    # Make sure that client library version is present in user agent header.
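+    # (The generated surface passes gapic_version=<package version> when it
+    # builds its client info; the rename below reports the same value as
+    # client_library_version instead, so that google-api-core attributes it to
+    # the client library rather than the GAPIC layer in the user agent string.)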
+    count = s.replace(
+        [
+            library
+            / f"google/pubsub_{library.name}/services/publisher/async_client.py",
+            library / f"google/pubsub_{library.name}/services/publisher/client.py",
+            library
+            / f"google/pubsub_{library.name}/services/publisher/transports/base.py",
+            library
+            / f"google/pubsub_{library.name}/services/schema_service/async_client.py",
+            library / f"google/pubsub_{library.name}/services/schema_service/client.py",
+            library
+            / f"google/pubsub_{library.name}/services/schema_service/transports/base.py",
+            library
+            / f"google/pubsub_{library.name}/services/subscriber/async_client.py",
+            library / f"google/pubsub_{library.name}/services/subscriber/client.py",
+            library
+            / f"google/pubsub_{library.name}/services/subscriber/transports/base.py",
+        ],
+        r"""gapic_version=package_version.__version__""",
+        "client_library_version=package_version.__version__",
+    )
+
+    if count < 1:
+        raise Exception("client_library_version replacement failed.")
+
+    # Allow timeout to be an instance of google.api_core.timeout.*
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/types/__init__.py",
+        r"from \.pubsub import \(",
+        "from typing import Union\n\n\g<0>",
+    )
+
+    if count < 1:
+        raise Exception("Catch timeout replacement 1 failed.")
+
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/types/__init__.py",
+        r"__all__ = \(\n",
+        textwrap.dedent(
+            '''\
+            TimeoutType = Union[
+                int,
+                float,
+                "google.api_core.timeout.ConstantTimeout",
+                "google.api_core.timeout.ExponentialTimeout",
+            ]
+            """The type of the timeout parameter of publisher client methods."""
+
+            \g<0>    "TimeoutType",'''
+        ),
+    )
+
+    if count < 1:
+        raise Exception("Catch timeout replacement 2 failed.")
+
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/services/publisher/*client.py",
+        r"from google.api_core import retry as retries.*\n",
+        "\g<0>from google.api_core import timeout as timeouts  # type: ignore\n",
+    )
+
+    if count < 1:
+        raise Exception("Catch timeout replacement 3 failed.")
+
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/services/publisher/*client.py",
+        f"from google\.pubsub_{library.name}\.types import pubsub",
+        f"\g<0>\nfrom google.pubsub_{library.name}.types import TimeoutType",
+    )
+
+    if count < 1:
+        raise Exception("Catch timeout replacement 4 failed.")
+
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/services/publisher/*client.py",
+        r"(\s+)timeout: Union\[float, object\] = gapic_v1.method.DEFAULT.*\n",
+        f"\g<1>timeout: TimeoutType = gapic_{library.name}.method.DEFAULT,",
+    )
+
+    if count < 1:
+        raise Exception("Catch timeout replacement 5 failed.")
+
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/services/publisher/*client.py",
+        r"([^\S\r\n]+)timeout \(float\): (.*)\n",
+        ("\g<1>timeout (TimeoutType):\n" "\g<1>    \g<2>\n"),
+    )
+
+    if count < 1:
+        raise Exception("Catch timeout replacement 6 failed.")
+
+    # Override the default max retry deadline for publisher methods.
+    count = s.replace(
+        library / f"google/pubsub_{library.name}/services/publisher/transports/base.py",
+        r"deadline=60\.0",
+        "deadline=600.0",
+    )
+    if count < 9:
+        raise Exception(
+            "Default retry deadline not overridden for all publisher methods."
+        )
+
+    # The namespace package declaration in google/cloud/__init__.py should be excluded
+    # from coverage.
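+    # After this replacement, the omit stanza of the generated .coveragerc
+    # should read (sketch, derived from the replacement arguments below):
+    #
+    #   omit =
+    #       google/cloud/__init__.py
+    #       google/pubsub/__init__.py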
+ count = s.replace( + library / ".coveragerc", + "google/pubsub/__init__.py", + """google/cloud/__init__.py + google/pubsub/__init__.py""", + ) + + if count < 1: + raise Exception(".coveragerc replacement failed.") + + s.move([library], excludes=["noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +templated_files = gcp.CommonTemplates().py_library( + microgenerator=True, + samples=True, + cov_level=99, + versions=gcp.common.detect_versions(path="./google", default_first=True), + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], + unit_test_dependencies=["flaky"], + system_test_python_versions=["3.12"], + system_test_external_dependencies=["psutil","flaky"], +) +s.move(templated_files, excludes=[".coveragerc", ".github/**", "README.rst", "docs/**", ".kokoro/**"]) + +python.py_samples(skip_readmes=True) + +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 000000000..574c5aed3 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/noxfile.py b/noxfile.py index 615358c2e..d1b3c15d1 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,21 +14,140 @@ # See the License for the specific language governing permissions and # limitations under the License. +# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` +# The source of truth for this file is `.librarian/generator-input` + + # Generated by synthtool. DO NOT EDIT! 
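+
+# Typical local invocations, for orientation (illustrative; all of these
+# sessions are defined below):
+#
+#   nox -s lint blacken mypy   # static analysis and formatting checks
+#   nox -s unit                # unit tests on every configured interpreter
+#   nox -s docs                # build the Sphinx documentation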
from __future__ import absolute_import + import os +import pathlib +import re import shutil +from typing import Dict, List +import warnings import nox +FLAKE8_VERSION = "flake8==6.1.0" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" +LINT_PATHS = ["google", "tests", "noxfile.py", "setup.py"] + +MYPY_VERSION = "mypy==1.10.0" + +DEFAULT_PYTHON_VERSION = "3.14" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [ + "flaky", +] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "psutil", + "flaky", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "mypy", + # https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936 + # "mypy_samples", # TODO: uncomment when the check passes + "docs", + "docfx", + "format", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True -BLACK_VERSION = "black==19.10b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Run type checks with mypy.""" + session.install("-e", ".[all]") + session.install(MYPY_VERSION) + + # Version 2.1.1 of google-api-core version is the first type-checked release. + # Version 2.2.0 of google-cloud-core version is the first type-checked release. + session.install( + "google-api-core[grpc]>=2.1.1", "google-cloud-core>=2.2.0", "types-requests" + ) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 + session.install("types-protobuf!=4.24.0.20240106", "types-setuptools") + + # TODO: Only check the hand-written layer, the generated code does not pass + # mypy checks yet. + # https://github.com/googleapis/gapic-generator-python/issues/1092 + # TODO: Re-enable mypy checks once we merge, since incremental checks are failing due to protobuf upgrade + # session.run("mypy", "-p", "google.cloud", "--exclude", "google/pubsub_v1/") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy_samples(session): + """Run type checks with mypy.""" + + session.install("-e", ".[all]") + + session.install("pytest") + session.install(MYPY_VERSION) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. 
+ session.install( + "types-mock", "types-protobuf", "types-setuptools", "types-requests" + ) + + session.run( + "mypy", + "--config-file", + str(CURRENT_DIRECTORY / "samples" / "snippets" / "mypy.ini"), + "--no-incremental", # Required by warn-unused-configs from mypy.ini to work + "samples/", + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -38,71 +157,171 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", BLACK_VERSION) + session.install(FLAKE8_VERSION, BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *LINT_PATHS, ) session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) - Format code to uniform standard. - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): """ - session.install(BLACK_VERSION) + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections session.run( - "black", *BLACK_PATHS, + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) @nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") + session.install("setuptools", "docutils", "pygments") session.run("python", "setup.py", "check", "--restructuredtext", "--strict") -def default(session): +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
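+    # (nox expands this parametrized session into one run per
+    # (python, protobuf implementation) pair, e.g.
+    # "unit-3.12(protobuf_implementation='upb')"; the implementation under
+    # test is selected via PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION below.)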
- session.install("mock", "pytest", "pytest-cov") - session.install("-e", ".") + + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google.cloud.pubsub", - "--cov=google.cloud", - "--cov=tests.unit", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -110,19 +329,25 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. 
- session.install("mock", "pytest", "google-cloud-testutils", "psutil") - session.install("-e", ".") + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) - if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -138,12 +363,26 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +# py > 3.10 not supported yet +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -158,3 +397,164 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +# py > 3.10 not supported yet +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+        "sphinxcontrib-applehelp==1.0.4",
+        "sphinxcontrib-devhelp==1.0.2",
+        "sphinxcontrib-htmlhelp==2.0.1",
+        "sphinxcontrib-qthelp==1.0.3",
+        "sphinxcontrib-serializinghtml==1.1.5",
+        "gcp-sphinx-docfx-yaml",
+        "alabaster",
+        "recommonmark",
+    )
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-D",
+        (
+            "extensions=sphinx.ext.autodoc,"
+            "sphinx.ext.autosummary,"
+            "docfx_yaml.extension,"
+            "sphinx.ext.intersphinx,"
+            "sphinx.ext.coverage,"
+            "sphinx.ext.napoleon,"
+            "sphinx.ext.todo,"
+            "sphinx.ext.viewcode,"
+            "recommonmark"
+        ),
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
+
+
+@nox.session(python="3.14")
+@nox.parametrize(
+    "protobuf_implementation",
+    ["python", "upb", "cpp"],
+)
+def prerelease_deps(session, protobuf_implementation):
+    """Run all tests with prerelease versions of dependencies installed."""
+
+    if protobuf_implementation == "cpp" and session.python in (
+        "3.11",
+        "3.12",
+        "3.13",
+        "3.14",
+    ):
+        session.skip("cpp implementation is not supported in python 3.11+")
+
+    # Install all dependencies
+    session.install("-e", ".[all, tests, tracing]")
+    unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
+    session.install(*unit_deps_all)
+    system_deps_all = (
+        SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
+    )
+    session.install(*system_deps_all)
+
+    # Because we test minimum dependency versions on the minimum Python
+    # version, the first version we test with in the unit tests sessions has a
+    # constraints file containing all dependencies and extras.
+    with open(
+        CURRENT_DIRECTORY
+        / "testing"
+        / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+        encoding="utf-8",
+    ) as constraints_file:
+        constraints_text = constraints_file.read()
+
+    # Ignore leading whitespace and comment lines.
+    constraints_deps = [
+        match.group(1)
+        for match in re.finditer(
+            r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE
+        )
+    ]
+
+    session.install(*constraints_deps)
+
+    prerel_deps = [
+        "protobuf",
+        # dependency of grpc
+        "six",
+        "grpc-google-iam-v1",
+        "googleapis-common-protos",
+        "grpcio",
+        "grpcio-status",
+        "google-api-core",
+        "google-auth",
+        "proto-plus",
+        "google-cloud-testutils",
+        # dependencies of google-cloud-testutils
+        "click",
+    ]
+
+    for dep in prerel_deps:
+        session.install("--pre", "--no-deps", "--upgrade", dep)
+
+    # Remaining dependencies
+    other_deps = [
+        "requests",
+    ]
+    session.install(*other_deps)
+
+    # Print out prerelease package versions
+    session.run(
+        "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+    )
+    session.run("python", "-c", "import grpc; print(grpc.__version__)")
+    session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+    session.run(
+        "py.test",
+        "tests/unit",
+        env={
+            "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+        },
+    )
+
+    system_test_path = os.path.join("tests", "system.py")
+    system_test_folder_path = os.path.join("tests", "system")
+
+    # Only run system tests if found.
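+    # (Both runs below mirror the regular system session: results are written
+    # as JUnit XML for CI ingestion, and the protobuf implementation under
+    # test is again selected via PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION.)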
+ if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 000000000..a1f4f115e --- /dev/null +++ b/pytest.ini @@ -0,0 +1,38 @@ +[pytest] +filterwarnings = + # treat all warnings as errors + error + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning + # Remove once https://github.com/grpc/grpc/issues/35086 is fixed + ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel + # Remove after support for Python 3.7 is dropped + ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1938 is fixed + ignore:The return_immediately flag is deprecated and should be set to False.:DeprecationWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed + ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning + # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed + ignore:unclosed:ResourceWarning + # Added to suppress "DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html" + # Remove once the minimum supported version of googleapis-common-protos is 1.62.0 + ignore:.*pkg_resources.declare_namespace:DeprecationWarning + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove once https://github.com/googleapis/gapic-generator-python/issues/2303 is fixed + ignore:The python-bigquery library will stop supporting Python 3.7:PendingDeprecationWarning + # Remove once we move off credential files https://github.com/googleapis/google-auth-library-python/pull/1812 + # Note that these are used in tests only + ignore:Your config file at [/home/kbuilder/.docker/config.json] contains these credential helper entries:DeprecationWarning + ignore:The `credentials_file` argument is deprecated because of a potential security risk:DeprecationWarning + ignore:You are using a Python version.*which Google will stop supporting in new releases of google\.api_core.*:FutureWarning + ignore:You are using a non-supported Python version \(([\d\.]+)\)\. Google will not post any further updates to google\.api_core.*:FutureWarning + ignore:You are using a Python version \(([\d\.]+)\) past its end of life\. 
Google will update google\.api_core.*:FutureWarning + # Remove after support for Python 3.7 is dropped + ignore:You are using a non-supported Python version \(3\.7:FutureWarning + # Remove after support for Python 3.8 is dropped + ignore:You are using a non-supported Python version \(3\.8:DeprecationWarning + ignore:You are using a non-supported Python version \(3\.8:FutureWarning + # Remove after support for Python 3.9 is dropped + ignore:You are using a Python version \(3\.9:FutureWarning + # Remove after support for Python 3.10 is dropped + ignore:.*You are using a Python version \(3\.10:FutureWarning diff --git a/renovate.json b/renovate.json index 4fa949311..c7875c469 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,12 @@ { "extends": [ - "config:base", ":preserveSemverRanges" - ] + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" + ], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md index 55c97b32f..8249522ff 100644 --- a/samples/AUTHORING_GUIDE.md +++ b/samples/AUTHORING_GUIDE.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md index 34c882b6f..f5fe2e6ba 100644 --- a/samples/CONTRIBUTING.md +++ b/samples/CONTRIBUTING.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py new file mode 100644 index 000000000..e1bf1f2c1 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_CreateTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = await client.create_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_CreateTopic_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py new file mode 100644 index 000000000..941fea1d4 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_CreateTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_CreateTopic_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py new file mode 100644 index 000000000..2fad1b099 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_DeleteTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + await client.delete_topic(request=request) + + +# [END pubsub_v1_generated_Publisher_DeleteTopic_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py new file mode 100644 index 000000000..27b58c27a --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_DeleteTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + client.delete_topic(request=request) + + +# [END pubsub_v1_generated_Publisher_DeleteTopic_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py new file mode 100644 index 000000000..22fb9e7e6 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_DetachSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.detach_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_DetachSubscription_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py new file mode 100644 index 000000000..058c10e73 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_DetachSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.detach_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_DetachSubscription_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py new file mode 100644 index 000000000..a8de7a307 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_GetTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = await client.get_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_GetTopic_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py new file mode 100644 index 000000000..d2846a750 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_GetTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_GetTopic_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py new file mode 100644 index 000000000..e8a3e2e8d --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py new file mode 100644 index 000000000..3a51a39b8 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py new file mode 100644 index 000000000..cbc81e48f --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py new file mode 100644 index 000000000..dee0821cd --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py new file mode 100644 index 000000000..0fc18583a --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
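+#   For example (illustrative values, not generated output): the `project`
+#   field takes the form "projects/{project-id}", e.g.
+#       request = pubsub_v1.ListTopicsRequest(
+#           project="projects/my-project-id",  # hypothetical project ID
+#       )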
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+async def sample_list_topics():
+    # Create a client
+    client = pubsub_v1.PublisherAsyncClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListTopicsRequest(
+        project="project_value",
+    )
+
+    # Make the request
+    page_result = await client.list_topics(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_Publisher_ListTopics_async]
diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py
new file mode 100644
index 000000000..2d2a987ee
--- /dev/null
+++ b/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListTopics
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-pubsub
+
+
+# [START pubsub_v1_generated_Publisher_ListTopics_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+def sample_list_topics():
+    # Create a client
+    client = pubsub_v1.PublisherClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListTopicsRequest(
+        project="project_value",
+    )
+
+    # Make the request
+    page_result = client.list_topics(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_Publisher_ListTopics_sync]
diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py
new file mode 100644
index 000000000..536b7f099
--- /dev/null
+++ b/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py
@@ -0,0 +1,52 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Publish +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_Publish_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_publish(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = await client.publish(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_Publish_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py new file mode 100644 index 000000000..e89f90320 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Publish +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_Publish_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
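+#   For example (illustrative values, not generated output): a realistic
+#   publish request also carries at least one message, e.g.
+#       request = pubsub_v1.PublishRequest(
+#           topic="projects/my-project-id/topics/my-topic",  # hypothetical IDs
+#           messages=[pubsub_v1.PubsubMessage(data=b"payload")],
+#       )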
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_publish(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = client.publish(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_Publish_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py new file mode 100644 index 000000000..a814eab54 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_UpdateTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = await client.update_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_UpdateTopic_async] diff --git a/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py new file mode 100644 index 000000000..46c967e4e --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_UpdateTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_UpdateTopic_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py new file mode 100644 index 000000000..e24d459c8 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CommitSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CommitSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
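+#   For example (illustrative values, not generated output): a commit
+#   normally also sets the schema type and definition, e.g.
+#       schema = pubsub_v1.Schema(
+#           name="projects/my-project-id/schemas/my-schema",  # hypothetical IDs
+#           type_=pubsub_v1.Schema.Type.AVRO,
+#           definition='{"type": "record", "name": "T", "fields": []}',
+#       )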
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = await client.commit_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CommitSchema_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py new file mode 100644 index 000000000..d3be03abe --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CommitSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CommitSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = client.commit_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CommitSchema_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py new file mode 100644 index 000000000..7eaf44f44 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CreateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.create_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CreateSchema_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py new file mode 100644 index 000000000..da7cf76c9 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CreateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
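+#   For example (illustrative values, not generated output): `parent` takes
+#   the form "projects/{project-id}" and an optional `schema_id` names the
+#   new schema, e.g.
+#       request = pubsub_v1.CreateSchemaRequest(
+#           parent="projects/my-project-id",  # hypothetical project ID
+#           schema=schema,
+#           schema_id="my-schema",  # hypothetical schema ID
+#       )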
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.create_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CreateSchema_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py new file mode 100644 index 000000000..6fffc7395 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + await client.delete_schema(request=request) + + +# [END pubsub_v1_generated_SchemaService_DeleteSchema_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py new file mode 100644 index 000000000..fa37387cd --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchemaRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchemaRevision_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_delete_schema_revision(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRevisionRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_schema_revision(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_DeleteSchemaRevision_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py new file mode 100644 index 000000000..4d1ac5e19 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchemaRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchemaRevision_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
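+#   For example (illustrative values, not generated output): a specific
+#   revision is addressed by suffixing the schema name with "@{revision_id}",
+#   e.g.
+#       request = pubsub_v1.DeleteSchemaRevisionRequest(
+#           name="projects/my-project-id/schemas/my-schema@c7cfa2a8",  # hypothetical
+#       )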
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_delete_schema_revision(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRevisionRequest( + name="name_value", + ) + + # Make the request + response = client.delete_schema_revision(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_DeleteSchemaRevision_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py new file mode 100644 index 000000000..64640ba16 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + client.delete_schema(request=request) + + +# [END pubsub_v1_generated_SchemaService_DeleteSchema_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py new file mode 100644 index 000000000..feb39e86e --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_GetSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_GetSchema_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py new file mode 100644 index 000000000..cf387dbcf --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_GetSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
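+#   For example (illustrative values, not generated output): the optional
+#   `view` field controls whether the schema definition is returned, e.g.
+#       request = pubsub_v1.GetSchemaRequest(
+#           name="projects/my-project-id/schemas/my-schema",  # hypothetical IDs
+#           view=pubsub_v1.SchemaView.FULL,
+#       )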
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+def sample_get_schema():
+    # Create a client
+    client = pubsub_v1.SchemaServiceClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.GetSchemaRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    response = client.get_schema(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END pubsub_v1_generated_SchemaService_GetSchema_sync]
diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py
new file mode 100644
index 000000000..9c2f61ad4
--- /dev/null
+++ b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSchemaRevisions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-pubsub
+
+
+# [START pubsub_v1_generated_SchemaService_ListSchemaRevisions_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+async def sample_list_schema_revisions():
+    # Create a client
+    client = pubsub_v1.SchemaServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListSchemaRevisionsRequest(
+        name="name_value",
+    )
+
+    # Make the request
+    page_result = await client.list_schema_revisions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_SchemaService_ListSchemaRevisions_async]
diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py
new file mode 100644
index 000000000..08b49520c
--- /dev/null
+++ b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemaRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ListSchemaRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_list_schema_revisions(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemaRevisionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_schema_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_SchemaService_ListSchemaRevisions_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py new file mode 100644 index 000000000..7d88f3194 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ListSchemas_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
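+#   For example (illustrative values, not generated output): `parent` takes
+#   the form "projects/{project-id}", e.g.
+#       request = pubsub_v1.ListSchemasRequest(
+#           parent="projects/my-project-id",  # hypothetical project ID
+#       )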
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+async def sample_list_schemas():
+    # Create a client
+    client = pubsub_v1.SchemaServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListSchemasRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = await client.list_schemas(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_SchemaService_ListSchemas_async]
diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py
new file mode 100644
index 000000000..776abc3d4
--- /dev/null
+++ b/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSchemas
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-pubsub
+
+
+# [START pubsub_v1_generated_SchemaService_ListSchemas_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+def sample_list_schemas():
+    # Create a client
+    client = pubsub_v1.SchemaServiceClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListSchemasRequest(
+        parent="parent_value",
+    )
+
+    # Make the request
+    page_result = client.list_schemas(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_SchemaService_ListSchemas_sync]
diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py
new file mode 100644
index 000000000..66628743c
--- /dev/null
+++ b/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_RollbackSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = await client.rollback_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_RollbackSchema_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py new file mode 100644 index 000000000..2a5d2687d --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_RollbackSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
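+#   For example (illustrative sketch; `schema_name` is a hypothetical
+#   variable, not generated output): a real `revision_id` can be read from a
+#   prior ListSchemaRevisions response, e.g.
+#       revisions = client.list_schema_revisions(
+#           request=pubsub_v1.ListSchemaRevisionsRequest(name=schema_name))
+#       revision_id = next(iter(revisions)).revision_id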
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = client.rollback_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_RollbackSchema_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py new file mode 100644 index 000000000..127b90fec --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateMessage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = await client.validate_message(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateMessage_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py new file mode 100644 index 000000000..08e3b9142 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateMessage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = client.validate_message(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateMessage_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py new file mode 100644 index 000000000..5cdc6072d --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
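+#   For example (illustrative values, not generated output): a minimal Avro
+#   definition to validate might look like
+#       schema.type_ = pubsub_v1.Schema.Type.AVRO
+#       schema.definition = '{"type": "record", "name": "T", "fields": []}'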
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.validate_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateSchema_async] diff --git a/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py new file mode 100644 index 000000000..af9792f1e --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.validate_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateSchema_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py new file mode 100644 index 000000000..37ea78fa1 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Acknowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Acknowledge_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ) + + # Make the request + await client.acknowledge(request=request) + + +# [END pubsub_v1_generated_Subscriber_Acknowledge_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py new file mode 100644 index 000000000..80cc79a64 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Acknowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Acknowledge_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ) + + # Make the request + client.acknowledge(request=request) + + +# [END pubsub_v1_generated_Subscriber_Acknowledge_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py new file mode 100644 index 000000000..f1084952b --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = await client.create_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSnapshot_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py new file mode 100644 index 000000000..207b31599 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSnapshot_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py new file mode 100644 index 000000000..64a7f134d --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = await client.create_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSubscription_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py new file mode 100644 index 000000000..7efb7a912 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = client.create_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSubscription_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py new file mode 100644 index 000000000..b92fab270 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + await client.delete_snapshot(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py new file mode 100644 index 000000000..dd7533eaf --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + client.delete_snapshot(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py new file mode 100644 index 000000000..12c85f95e --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + await client.delete_subscription(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSubscription_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py new file mode 100644 index 000000000..c9285d87e --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + client.delete_subscription(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSubscription_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py new file mode 100644 index 000000000..fd22fe023 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = await client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_GetSnapshot_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py new file mode 100644 index 000000000..a027bcddf --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_GetSnapshot_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py new file mode 100644 index 000000000..12eabdec4 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.get_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_GetSubscription_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py new file mode 100644 index 000000000..13b7ea626 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+def sample_get_subscription():
+    # Create a client
+    client = pubsub_v1.SubscriberClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.GetSubscriptionRequest(
+        subscription="subscription_value",
+    )
+
+    # Make the request
+    response = client.get_subscription(request=request)
+
+    # Handle the response
+    print(response)
+
+# [END pubsub_v1_generated_Subscriber_GetSubscription_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py new file mode 100644 index 000000000..0d3698773 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSnapshots
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-pubsub


+# [START pubsub_v1_generated_Subscriber_ListSnapshots_async]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+async def sample_list_snapshots():
+    # Create a client
+    client = pubsub_v1.SubscriberAsyncClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListSnapshotsRequest(
+        project="project_value",
+    )
+
+    # Make the request; the async method must be awaited to obtain the pager
+    page_result = await client.list_snapshots(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_Subscriber_ListSnapshots_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py new file mode 100644 index 000000000..4568bef48 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ListSnapshots_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Subscriber_ListSnapshots_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py new file mode 100644 index 000000000..b7811265a --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ListSubscriptions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+async def sample_list_subscriptions():
+    # Create a client
+    client = pubsub_v1.SubscriberAsyncClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListSubscriptionsRequest(
+        project="project_value",
+    )
+
+    # Make the request; the async method must be awaited to obtain the pager
+    page_result = await client.list_subscriptions(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_Subscriber_ListSubscriptions_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py new file mode 100644 index 000000000..5bdc68dd5 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ListSubscriptions
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.

+# To install the latest published package dependency, execute the following:
+# python3 -m pip install google-cloud-pubsub


+# [START pubsub_v1_generated_Subscriber_ListSubscriptions_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+# client as shown in:
+# https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google import pubsub_v1
+
+
+def sample_list_subscriptions():
+    # Create a client
+    client = pubsub_v1.SubscriberClient()
+
+    # Initialize request argument(s)
+    request = pubsub_v1.ListSubscriptionsRequest(
+        project="project_value",
+    )
+
+    # Make the request
+    page_result = client.list_subscriptions(request=request)
+
+    # Handle the response
+    for response in page_result:
+        print(response)
+
+# [END pubsub_v1_generated_Subscriber_ListSubscriptions_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py new file mode 100644 index 000000000..4492740cd --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyAckDeadline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ack_deadline_seconds=2066, + ) + + # Make the request + await client.modify_ack_deadline(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py new file mode 100644 index 000000000..d198d4bab --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyAckDeadline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value1', 'ack_ids_value2'], + ack_deadline_seconds=2066, + ) + + # Make the request + client.modify_ack_deadline(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py new file mode 100644 index 000000000..155db77c6 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyPushConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyPushConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + await client.modify_push_config(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py new file mode 100644 index 000000000..bca872f9d --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyPushConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + client.modify_push_config(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py new file mode 100644 index 000000000..d351f26cf --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Pull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Pull_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = await client.pull(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Pull_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py new file mode 100644 index 000000000..e11007592 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Pull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Pull_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = client.pull(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Pull_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py new file mode 100644 index 000000000..b5eab9a46 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Seek +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Seek_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.seek(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Seek_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py new file mode 100644 index 000000000..8a0063f66 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Seek +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Seek_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.seek(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Seek_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py new file mode 100644 index 000000000..b2ecd899b --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingPull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_StreamingPull_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_pull(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END pubsub_v1_generated_Subscriber_StreamingPull_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py new file mode 100644 index 000000000..2de009269 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingPull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_StreamingPull_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_pull(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END pubsub_v1_generated_Subscriber_StreamingPull_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py new file mode 100644 index 000000000..7aa873ec1 --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = await client.update_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py new file mode 100644 index 000000000..7cb4af13e --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = client.update_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py new file mode 100644 index 000000000..ed6a5512b --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = await client.update_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSubscription_async] diff --git a/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py new file mode 100644 index 000000000..a592001ec --- /dev/null +++ b/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = client.update_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSubscription_sync] diff --git a/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/samples/generated_samples/snippet_metadata_google.pubsub.v1.json new file mode 100644 index 000000000..f3af602ab --- /dev/null +++ b/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -0,0 +1,5736 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.pubsub.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-pubsub", + "version": "2.34.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.create_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "pubsub_v1_generated_publisher_create_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_create_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.create_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "pubsub_v1_generated_publisher_create_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_create_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.delete_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.DeleteTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_topic" + }, + "description": "Sample for DeleteTopic", + "file": "pubsub_v1_generated_publisher_delete_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_delete_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.delete_topic", + "method": { + "fullName": 
"google.pubsub.v1.Publisher.DeleteTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_topic" + }, + "description": "Sample for DeleteTopic", + "file": "pubsub_v1_generated_publisher_delete_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_delete_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.detach_subscription", + "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" + }, + "description": "Sample for DetachSubscription", + "file": "pubsub_v1_generated_publisher_detach_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.detach_subscription", + "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" + }, + "description": "Sample for DetachSubscription", + "file": "pubsub_v1_generated_publisher_detach_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.get_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "pubsub_v1_generated_publisher_get_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_get_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.get_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "pubsub_v1_generated_publisher_get_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_get_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_snapshots", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager", + "shortName": "list_topic_snapshots" + }, + "description": "Sample for ListTopicSnapshots", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_snapshots", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager", + "shortName": "list_topic_snapshots" + }, + "description": "Sample for ListTopicSnapshots", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + 
{ + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager", + "shortName": "list_topic_subscriptions" + }, + "description": "Sample for ListTopicSubscriptions", + "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager", + "shortName": "list_topic_subscriptions" + }, + "description": "Sample for ListTopicSubscriptions", + "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py" + }, + 
{ + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topics", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "pubsub_v1_generated_publisher_list_topics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topics", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "pubsub_v1_generated_publisher_list_topics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topics_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.publish", + "method": { + "fullName": "google.pubsub.v1.Publisher.Publish", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, 
+ "shortName": "Publish" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "MutableSequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.PublishResponse", + "shortName": "publish" + }, + "description": "Sample for Publish", + "file": "pubsub_v1_generated_publisher_publish_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_Publish_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_publish_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.publish", + "method": { + "fullName": "google.pubsub.v1.Publisher.Publish", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "Publish" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "MutableSequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.PublishResponse", + "shortName": "publish" + }, + "description": "Sample for Publish", + "file": "pubsub_v1_generated_publisher_publish_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_publish_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.update_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.UpdateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "topic", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "pubsub_v1_generated_publisher_update_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_update_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.update_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.UpdateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "topic", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "pubsub_v1_generated_publisher_update_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_update_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.commit_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CommitSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CommitSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CommitSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.pubsub_v1.types.Schema", + "shortName": "commit_schema" + }, + "description": "Sample for CommitSchema", + "file": "pubsub_v1_generated_schema_service_commit_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CommitSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_commit_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.commit_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CommitSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CommitSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CommitSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "commit_schema" + }, + "description": "Sample for CommitSchema", + "file": "pubsub_v1_generated_schema_service_commit_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CommitSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_commit_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.create_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": 
"pubsub_v1_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_create_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema_revision", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchemaRevision", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchemaRevision" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRevisionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "revision_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "delete_schema_revision" + }, + "description": "Sample for DeleteSchemaRevision", + "file": "pubsub_v1_generated_schema_service_delete_schema_revision_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "pubsub_v1_generated_SchemaService_DeleteSchemaRevision_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_revision_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema_revision", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchemaRevision", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchemaRevision" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRevisionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "revision_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "delete_schema_revision" + }, + "description": "Sample for DeleteSchemaRevision", + "file": "pubsub_v1_generated_schema_service_delete_schema_revision_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchemaRevision_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_revision_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "pubsub_v1_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + 
"fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schema_revisions", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemaRevisions", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemaRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemaRevisionsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsAsyncPager", + "shortName": "list_schema_revisions" + }, + "description": "Sample for ListSchemaRevisions", + "file": "pubsub_v1_generated_schema_service_list_schema_revisions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemaRevisions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schema_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.list_schema_revisions", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemaRevisions", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + 
"shortName": "ListSchemaRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemaRevisionsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsPager", + "shortName": "list_schema_revisions" + }, + "description": "Sample for ListSchemaRevisions", + "file": "pubsub_v1_generated_schema_service_list_schema_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemaRevisions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schema_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.rollback_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.RollbackSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "RollbackSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.RollbackSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "revision_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "rollback_schema" + }, + "description": "Sample for RollbackSchema", + "file": "pubsub_v1_generated_schema_service_rollback_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_RollbackSchema_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_rollback_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.rollback_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.RollbackSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "RollbackSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.RollbackSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "revision_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.pubsub_v1.types.Schema", + "shortName": "rollback_schema" + }, + "description": "Sample for RollbackSchema", + "file": "pubsub_v1_generated_schema_service_rollback_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_RollbackSchema_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_rollback_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_message", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" + }, + "description": "Sample for ValidateMessage", + "file": "pubsub_v1_generated_schema_service_validate_message_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_message_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_message", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" + }, + "description": "Sample for ValidateMessage", + "file": "pubsub_v1_generated_schema_service_validate_message_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", + "segments": [ + { + "end": 52, 
+ "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_message_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" + }, + "description": "Sample for ValidateSchema", + "file": "pubsub_v1_generated_schema_service_validate_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" + }, + "description": "Sample for ValidateSchema", + "file": "pubsub_v1_generated_schema_service_validate_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.acknowledge", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Acknowledge", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "acknowledge" + }, + "description": "Sample for Acknowledge", + "file": "pubsub_v1_generated_subscriber_acknowledge_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.acknowledge", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Acknowledge", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "acknowledge" + }, + "description": "Sample for Acknowledge", + "file": "pubsub_v1_generated_subscriber_acknowledge_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "pubsub_v1_generated_subscriber_create_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "pubsub_v1_generated_subscriber_create_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", + "service": { + 
"fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" + }, + "description": "Sample for CreateSubscription", + "file": "pubsub_v1_generated_subscriber_create_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" + }, + "description": "Sample for CreateSubscription", + "file": "pubsub_v1_generated_subscriber_create_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_snapshot", + "method": { + "fullName": 
"google.pubsub.v1.Subscriber.DeleteSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "pubsub_v1_generated_subscriber_delete_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "pubsub_v1_generated_subscriber_delete_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "pubsub_v1_generated_subscriber_get_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, 
+ "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "pubsub_v1_generated_subscriber_get_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" + }, + "description": "Sample for GetSubscription", + "file": "pubsub_v1_generated_subscriber_get_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_subscription_async.py" 
+ }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" + }, + "description": "Sample for GetSubscription", + "file": "pubsub_v1_generated_subscriber_get_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_snapshots", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "pubsub_v1_generated_subscriber_list_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.list_snapshots", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", + "service": { + 
"fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "pubsub_v1_generated_subscriber_list_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager", + "shortName": "list_subscriptions" + }, + "description": "Sample for ListSubscriptions", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.list_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "project", + "type": "str" 
+ }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager", + "shortName": "list_subscriptions" + }, + "description": "Sample for ListSubscriptions", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_ack_deadline", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "modify_ack_deadline" + }, + "description": "Sample for ModifyAckDeadline", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.modify_ack_deadline", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "modify_ack_deadline" + }, + "description": "Sample for ModifyAckDeadline", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_push_config", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "modify_push_config" + }, + "description": "Sample for ModifyPushConfig", + "file": "pubsub_v1_generated_subscriber_modify_push_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.modify_push_config", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "modify_push_config" + }, + "description": "Sample for ModifyPushConfig", + "file": "pubsub_v1_generated_subscriber_modify_push_config_sync.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Pull" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" + }, + "description": "Sample for Pull", + "file": "pubsub_v1_generated_subscriber_pull_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_pull_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Pull" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" + }, + "description": "Sample for Pull", + "file": "pubsub_v1_generated_subscriber_pull_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Pull_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_pull_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.seek", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Seek" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" + }, + "description": "Sample for Seek", + "file": "pubsub_v1_generated_subscriber_seek_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_seek_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.seek", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Seek" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" + }, + "description": "Sample for Seek", + "file": "pubsub_v1_generated_subscriber_seek_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_seek_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.streaming_pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", 
+ "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" + }, + "description": "Sample for StreamingPull", + "file": "pubsub_v1_generated_subscriber_streaming_pull_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 56, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 57, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.streaming_pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" + }, + "description": "Sample for StreamingPull", + "file": "pubsub_v1_generated_subscriber_streaming_pull_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 56, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 57, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "snapshot", + "type": "google.pubsub_v1.types.Snapshot" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + 
}, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" + }, + "description": "Sample for UpdateSnapshot", + "file": "pubsub_v1_generated_subscriber_update_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "snapshot", + "type": "google.pubsub_v1.types.Snapshot" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" + }, + "description": "Sample for UpdateSnapshot", + "file": "pubsub_v1_generated_subscriber_update_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "subscription", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" + }, + "description": "Sample for UpdateSubscription", + "file": "pubsub_v1_generated_subscriber_update_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "subscription", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" + }, + "description": "Sample for UpdateSubscription", + "file": "pubsub_v1_generated_subscriber_update_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_sync.py" + } + ] +} diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst index 2676680af..5c1d4be68 100644 --- a/samples/snippets/README.rst +++ b/samples/snippets/README.rst @@ -1,11 +1,10 @@ - .. This file is automatically generated. Do not edit this file directly. Google Cloud Pub/Sub Python Samples =============================================================================== .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/README.rst This directory contains samples for Google Cloud Pub/Sub. 
`Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. @@ -16,11 +15,13 @@ This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub` .. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs + + + Setup ------------------------------------------------------------------------------- - Authentication ++++++++++++++ @@ -31,9 +32,6 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started - - - Install Dependencies ++++++++++++++++++++ @@ -41,7 +39,7 @@ Install Dependencies .. code-block:: bash - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + $ git clone https://github.com/googleapis/python-pubsub.git #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. @@ -64,20 +62,30 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ +Samples +------------------------------------------------------------------------------- + +Quickstart (Publisher) ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/quickstart/pub.py,samples/snippets/README.rst +To run this sample: -Samples -------------------------------------------------------------------------------- +.. code-block:: bash + $ python3 quickstart/pub.py -Quickstart + +Quickstart (Subscriber) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/quickstart/sub.py,samples/snippets/README.rst @@ -86,16 +94,14 @@ To run this sample: .. code-block:: bash - $ python quickstart.py - - + $ python3 quickstart/sub.py Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/publisher.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/publisher.py,samples/snippets/README.rst @@ -104,12 +110,11 @@ To run this sample: .. 
code-block:: bash - $ python publisher.py - + $ python3 publisher.py usage: publisher.py [-h] project_id - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys,detach-subscription} ... This application demonstrates how to perform basic operations on topics @@ -120,7 +125,7 @@ To run this sample: positional arguments: project_id Your Google Cloud project ID - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys,detach-subscription} list Lists all Pub/Sub topics in the given project. create Create a new Pub/Sub topic. delete Deletes an existing Pub/Sub topic. @@ -136,19 +141,25 @@ To run this sample: batch settings. publish-with-retry-settings Publishes messages with custom retry settings. + publish-with-ordering-keys + Publishes messages with ordering keys. + resume-publish-with-ordering-keys + Resume publishing messages with ordering keys when + unrecoverable errors occur. + detach-subscription + Detaches a subscription from a topic and drops all + messages retained in it. optional arguments: -h, --help show this help message and exit - - Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/subscriber.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/subscriber.py,samples/snippets/README.rst @@ -157,12 +168,11 @@ To run this sample: .. code-block:: bash - $ python subscriber.py - + $ python3 subscriber.py usage: subscriber.py [-h] project_id - {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,create-with-ordering,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} ... 
This application demonstrates how to perform basic operations on @@ -173,13 +183,15 @@ To run this sample: positional arguments: project_id Your Google Cloud project ID - {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,create-with-ordering,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} list-in-topic Lists all subscriptions for a given topic. list-in-project Lists all subscriptions in the current project. create Create a new pull subscription on the given topic. create-with-dead-letter-policy Create a subscription with dead letter policy. create-push Create a new push subscription on the given topic. + create-with-ordering + Create a subscription with message ordering enabled. delete Deletes an existing Pub/Sub subscription. update-push Updates an existing Pub/Sub subscription's push endpoint URL. Note that certain properties of a @@ -207,13 +219,11 @@ To run this sample: - - Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/iam.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/iam.py,samples/snippets/README.rst @@ -222,11 +232,10 @@ To run this sample: .. code-block:: bash - $ python iam.py - + $ python3 iam.py usage: iam.py [-h] - project + project_id {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} ... @@ -237,7 +246,7 @@ To run this sample: at https://cloud.google.com/pubsub/docs. positional arguments: - project Your Google Cloud project ID + project_id Your Google Cloud project ID {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} get-topic-policy Prints the IAM policy for the given topic. get-subscription-policy @@ -257,10 +266,48 @@ To run this sample: +Schema ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/schema.py,samples/snippets/README.rst +To run this sample: + +.. code-block:: bash + + $ python3 schema.py + + usage: schema.py [-h] + project_id + {create-avro,create-proto,get,list,delete,create-topic,publish-avro,publish-proto,receive-avro,receive-proto} + ... + + This application demonstrates how to perform basic schema operations + using the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs.
+ + positional arguments: + project_id Your Google Cloud project ID + {create-avro,create-proto,get,list,delete,create-topic,publish-avro,publish-proto,receive-avro,receive-proto} + create-avro Create a schema resource from an Avro schema file formatted in JSON. + create-proto Create a schema resource from a protobuf schema file. + get Get a schema resource. + list List schema resources. + delete Delete a schema resource. + create-topic Create a topic resource with a schema. + publish-avro Publish a BINARY or JSON encoded message to a topic configured with an Avro schema. + publish-proto Publish a BINARY or JSON encoded message to a topic configured with a protobuf schema. + receive-avro Receive and decode messages sent to a topic with an Avro schema. + receive-proto Receive and decode messages sent to a topic with a protobuf schema. + + optional arguments: + -h, --help show this help message and exit The client library @@ -278,5 +325,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/samples/snippets/README.rst.in b/samples/snippets/README.rst.in index ddbc64712..b0e98cbeb 100644 --- a/samples/snippets/README.rst.in +++ b/samples/snippets/README.rst.in @@ -13,8 +13,10 @@ setup: - install_deps samples: -- name: Quickstart - file: quickstart.py +- name: Quickstart (Publisher) + file: quickstart/pub.py +- name: Quickstart (Subscriber) + file: quickstart/sub.py - name: Publisher file: publisher.py show_help: true @@ -27,4 +29,4 @@ samples: cloud_client_library: true -folder: pubsub/cloud-client \ No newline at end of file +folder: samples/snippets \ No newline at end of file diff --git a/samples/snippets/iam.py b/samples/snippets/iam.py index 71c55d764..aaf024864 100644 --- a/samples/snippets/iam.py +++ b/samples/snippets/iam.py @@ -24,19 +24,19 @@ import argparse -def get_topic_policy(project, topic_id): +def get_topic_policy(project_id: str, topic_id: str) -> None: """Prints the IAM policy for the given topic.""" # [START pubsub_get_topic_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing topic. # project_id = "your-project-id" # topic_id = "your-topic-id" client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) + topic_path = client.topic_path(project_id, topic_id) - policy = client.get_iam_policy(topic_path) + policy = client.get_iam_policy(request={"resource": topic_path}) print("Policy for topic {}:".format(topic_path)) for binding in policy.bindings: @@ -44,19 +44,19 @@ def get_topic_policy(project, topic_id): # [END pubsub_get_topic_policy] -def get_subscription_policy(project, subscription_id): +def get_subscription_policy(project_id: str, subscription_id: str) -> None: """Prints the IAM policy for the given subscription.""" # [START pubsub_get_subscription_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing subscription.
# project_id = "your-project-id" # subscription_id = "your-subscription-id" client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) + subscription_path = client.subscription_path(project_id, subscription_id) - policy = client.get_iam_policy(subscription_path) + policy = client.get_iam_policy(request={"resource": subscription_path}) print("Policy for subscription {}:".format(subscription_path)) for binding in policy.bindings: @@ -66,22 +66,22 @@ def get_subscription_policy(project, subscription_id): # [END pubsub_get_subscription_policy] -def set_topic_policy(project, topic_id): +def set_topic_policy(project_id: str, topic_id: str) -> None: """Sets the IAM policy for a topic.""" # [START pubsub_set_topic_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing topic. # project_id = "your-project-id" # topic_id = "your-topic-id" client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) + topic_path = client.topic_path(project_id, topic_id) - policy = client.get_iam_policy(topic_path) + policy = client.get_iam_policy(request={"resource": topic_path}) - # Add all users as viewers. - policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) + # Add members of a domain as viewers. + policy.bindings.add(role="roles/pubsub.viewer", members=["domain:google.com"]) # Add a group as a publisher. policy.bindings.add( @@ -89,34 +89,36 @@ def set_topic_policy(project, topic_id): ) # Set the policy - policy = client.set_iam_policy(topic_path, policy) + policy = client.set_iam_policy(request={"resource": topic_path, "policy": policy}) print("IAM policy for topic {} set: {}".format(topic_id, policy)) # [END pubsub_set_topic_policy] -def set_subscription_policy(project, subscription_id): +def set_subscription_policy(project_id: str, subscription_id: str) -> None: """Sets the IAM policy for a subscription.""" # [START pubsub_set_subscription_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing subscription. # project_id = "your-project-id" # subscription_id = "your-subscription-id" client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) + subscription_path = client.subscription_path(project_id, subscription_id) - policy = client.get_iam_policy(subscription_path) + policy = client.get_iam_policy(request={"resource": subscription_path}) - # Add all users as viewers. - policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) + # Add members of a domain as viewers. + policy.bindings.add(role="roles/pubsub.viewer", members=["domain:google.com"]) # Add a group as an editor. policy.bindings.add(role="roles/editor", members=["group:cloud-logs@google.com"]) # Set the policy - policy = client.set_iam_policy(subscription_path, policy) + policy = client.set_iam_policy( + request={"resource": subscription_path, "policy": policy} + ) print("IAM policy for subscription {} set: {}".format(subscription_id, policy)) @@ -124,21 +126,23 @@ def set_subscription_policy(project, subscription_id): # [END pubsub_set_subscription_policy] -def check_topic_permissions(project, topic_id): +def check_topic_permissions(project_id: str, topic_id: str) -> None: """Checks which permissions are available on the given topic.""" # [START pubsub_test_topic_permissions] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing topic.
# project_id = "your-project-id" # topic_id = "your-topic-id" client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) + topic_path = client.topic_path(project_id, topic_id) permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] - allowed_permissions = client.test_iam_permissions(topic_path, permissions_to_check) + allowed_permissions = client.test_iam_permissions( + request={"resource": topic_path, "permissions": permissions_to_check} + ) print( "Allowed permissions for topic {}: {}".format(topic_path, allowed_permissions) @@ -146,17 +150,17 @@ def check_topic_permissions(project, topic_id): # [END pubsub_test_topic_permissions] -def check_subscription_permissions(project, subscription_id): +def check_subscription_permissions(project_id: str, subscription_id: str) -> None: """Checks which permissions are available on the given subscription.""" # [START pubsub_test_subscription_permissions] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing subscription. # project_id = "your-project-id" # subscription_id = "your-subscription-id" client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) + subscription_path = client.subscription_path(project_id, subscription_id) permissions_to_check = [ "pubsub.subscriptions.consume", @@ -164,7 +168,7 @@ def check_subscription_permissions(project, subscription_id): ] allowed_permissions = client.test_iam_permissions( - subscription_path, permissions_to_check + request={"resource": subscription_path, "permissions": permissions_to_check} ) print( @@ -179,9 +183,10 @@ def check_subscription_permissions(project, subscription_id): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument("project", help="Your Google Cloud project ID") + parser.add_argument("project_id", help="Your Google Cloud project ID") subparsers = parser.add_subparsers(dest="command") @@ -211,21 +216,22 @@ def check_subscription_permissions(project, subscription_id): check_topic_permissions_parser.add_argument("topic_id") check_subscription_permissions_parser = subparsers.add_parser( - "check-subscription-permissions", help=check_subscription_permissions.__doc__, + "check-subscription-permissions", + help=check_subscription_permissions.__doc__, ) check_subscription_permissions_parser.add_argument("subscription_id") args = parser.parse_args() if args.command == "get-topic-policy": - get_topic_policy(args.project, args.topic_id) + get_topic_policy(args.project_id, args.topic_id) elif args.command == "get-subscription-policy": - get_subscription_policy(args.project, args.subscription_id) + get_subscription_policy(args.project_id, args.subscription_id) elif args.command == "set-topic-policy": - set_topic_policy(args.project, args.topic_id) + set_topic_policy(args.project_id, args.topic_id) elif args.command == "set-subscription-policy": - set_subscription_policy(args.project, args.subscription_id) + set_subscription_policy(args.project_id, args.subscription_id) elif args.command == "check-topic-permissions": - check_topic_permissions(args.project, args.topic_id) + check_topic_permissions(args.project_id, args.topic_id) elif args.command == "check-subscription-permissions": - check_subscription_permissions(args.project, args.subscription_id) +
check_subscription_permissions(args.project_id, args.subscription_id) diff --git a/samples/snippets/iam_test.py b/samples/snippets/iam_test.py index d196953f6..c1289ad39 100644 --- a/samples/snippets/iam_test.py +++ b/samples/snippets/iam_test.py @@ -13,106 +13,117 @@ # limitations under the License. import os +from typing import Generator import uuid +from _pytest.capture import CaptureFixture +from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 import pytest import iam UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "iam-test-topic-" + UUID -SUBSCRIPTION = "iam-test-subscription-" + UUID +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC_ID = "iam-test-topic-" + UUID +SUBSCRIPTION_ID = "iam-test-subscription-" + UUID @pytest.fixture(scope="module") -def publisher_client(): +def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield pubsub_v1.PublisherClient() @pytest.fixture(scope="module") -def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) +def topic_path( + publisher_client: pubsub_v1.PublisherClient, +) -> Generator[str, None, None]: + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: - publisher_client.delete_topic(topic_path) - except Exception: - pass - - publisher_client.create_topic(topic_path) + topic = publisher_client.get_topic(request={"topic": topic_path}) + except NotFound: + topic = publisher_client.create_topic(request={"name": topic_path}) - yield topic_path + yield topic.name - publisher_client.delete_topic(topic_path) + try: + publisher_client.delete_topic(request={"topic": topic.name}) + except NotFound: + pass @pytest.fixture(scope="module") -def subscriber_client(): +def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client subscriber_client.close() -@pytest.fixture -def subscription(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) +@pytest.fixture(scope="module") +def subscription_path( + subscriber_client: pubsub_v1.SubscriberClient, + topic_path: str, +) -> Generator[str, None, None]: + subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) + subscription = subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + yield subscription.name try: - subscriber_client.delete_subscription(subscription_path) - except Exception: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: pass - subscriber_client.create_subscription(subscription_path, topic=topic) - - yield subscription_path - - subscriber_client.delete_subscription(subscription_path) - - -def test_get_topic_policy(topic, capsys): - iam.get_topic_policy(PROJECT, TOPIC) +def test_get_topic_policy(topic_path: str, capsys: CaptureFixture[str]) -> None: + iam.get_topic_policy(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert topic in out - + assert topic_path in out -def test_get_subscription_policy(subscription, capsys): - iam.get_subscription_policy(PROJECT, SUBSCRIPTION) +def test_get_subscription_policy( + subscription_path: str, capsys: CaptureFixture[str] +) -> None: + iam.get_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() - assert subscription in out + assert subscription_path in out -def test_set_topic_policy(publisher_client, topic): - 
iam.set_topic_policy(PROJECT, TOPIC) - - policy = publisher_client.get_iam_policy(topic) +def test_set_topic_policy( + publisher_client: pubsub_v1.PublisherClient, topic_path: str +) -> None: + iam.set_topic_policy(PROJECT_ID, TOPIC_ID) + policy = publisher_client.get_iam_policy(request={"resource": topic_path}) assert "roles/pubsub.publisher" in str(policy) - assert "allUsers" in str(policy) - + assert "domain:google.com" in str(policy) -def test_set_subscription_policy(subscriber_client, subscription): - iam.set_subscription_policy(PROJECT, SUBSCRIPTION) - policy = subscriber_client.get_iam_policy(subscription) +def test_set_subscription_policy( + subscriber_client: pubsub_v1.SubscriberClient, + subscription_path: str, +) -> None: + iam.set_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) + policy = subscriber_client.get_iam_policy(request={"resource": subscription_path}) assert "roles/pubsub.viewer" in str(policy) - assert "allUsers" in str(policy) + assert "domain:google.com" in str(policy) -def test_check_topic_permissions(topic, capsys): - iam.check_topic_permissions(PROJECT, TOPIC) - +def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture[str]) -> None: + iam.check_topic_permissions(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - - assert topic in out + assert topic_path in out assert "pubsub.topics.publish" in out -def test_check_subscription_permissions(subscription, capsys): - iam.check_subscription_permissions(PROJECT, SUBSCRIPTION) - +def test_check_subscription_permissions( + subscription_path: str, + capsys: CaptureFixture[str], +) -> None: + iam.check_subscription_permissions(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() - - assert subscription in out + assert subscription_path in out assert "pubsub.subscriptions.consume" in out diff --git a/samples/snippets/mypy.ini b/samples/snippets/mypy.ini new file mode 100644 index 000000000..3c8dd6f41 --- /dev/null +++ b/samples/snippets/mypy.ini @@ -0,0 +1,12 @@ +[mypy] +; We require type annotations in all samples. +strict = True +exclude = noxfile\.py +warn_unused_configs = True + +; Ignore errors caused by missing library stubs or py.typed marker +; Refer https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-library-stubs-or-py-typed-marker +; Errors are ignored instead of adding stubs as a workaround, since this directory contains sample code +; that does not affect the functionality of the client library. +[mypy-avro.*,backoff,flaky,google.cloud.*] +ignore_missing_imports = True diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index ba55d7ce5..c326375be 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -14,9 +14,11 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys +from typing import Callable, Dict, Optional import nox @@ -27,8 +29,10 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +# Copy `noxfile_config.py` to your directory and modify it instead. # `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -37,24 +41,29 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions.
- 'ignored_versions': ["2.7"], - + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -64,51 +73,42 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +# All versions used to test samples. +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # -def _determine_local_import_names(start_dir): - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. 
# # We ignore the following rules: @@ -123,7 +123,6 @@ def _determine_local_import_names(start_dir): "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", ] @@ -131,18 +130,52 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8") + else: + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - "." + ".", ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # @@ -151,13 +184,39 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests( + session: nox.sessions.Session, post_install: Optional[Callable] = None +) -> None: + """Runs py.test for a particular project.""" + # check for presence of tests + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) + test_list.extend(glob.glob("**/tests", recursive=True)) + + if len(test_list) == 0: + print("No tests found, skipping directory.") + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" + concurrent_args = [] if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) @@ -165,26 +224,31 @@ def _session_tests(session, post_install=None): if post_install: post_install(session) + if "pytest-parallel" in packages: + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) + elif "pytest-xdist" in packages: + concurrent_args.extend(["-n", "auto"]) + session.run( "pytest", -
*(PYTEST_COMMON_ARGS + session.posargs), + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), # Pytest will return 5 when no tests are collected. This can happen # on travis where slow and flaky tests are excluded. # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # @@ -192,8 +256,8 @@ def py(session): # -def _get_repo_root(): - """ Returns the root folder of the project. """ +def _get_repo_root() -> Optional[str]: + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): @@ -201,6 +265,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -210,7 +279,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/samples/snippets/noxfile_config.py b/samples/snippets/noxfile_config.py new file mode 100644 index 000000000..545546d21 --- /dev/null +++ b/samples/snippets/noxfile_config.py @@ -0,0 +1,42 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be imported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. + "ignored_versions": ["2.7", "3.6"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": True, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. 
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} diff --git a/samples/snippets/publisher.py b/samples/snippets/publisher.py index 477b31b9c..d2b6dd2b8 100644 --- a/samples/snippets/publisher.py +++ b/samples/snippets/publisher.py @@ -24,7 +24,7 @@ import argparse -def list_topics(project_id): +def list_topics(project_id: str) -> None: """Lists all Pub/Sub topics in the given project.""" # [START pubsub_list_topics] from google.cloud import pubsub_v1 @@ -33,14 +33,14 @@ def list_topics(project_id): # project_id = "your-project-id" publisher = pubsub_v1.PublisherClient() - project_path = publisher.project_path(project_id) + project_path = f"projects/{project_id}" - for topic in publisher.list_topics(project_path): + for topic in publisher.list_topics(request={"project": project_path}): print(topic) # [END pubsub_list_topics] -def create_topic(project_id, topic_id): +def create_topic(project_id: str, topic_id: str) -> None: """Create a new Pub/Sub topic.""" # [START pubsub_quickstart_create_topic] # [START pubsub_create_topic] @@ -53,14 +53,362 @@ def create_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - topic = publisher.create_topic(topic_path) + topic = publisher.create_topic(request={"name": topic_path}) - print("Topic created: {}".format(topic)) + print(f"Created topic: {topic.name}") # [END pubsub_quickstart_create_topic] # [END pubsub_create_topic] -def delete_topic(project_id, topic_id): +def create_topic_with_kinesis_ingestion( + project_id: str, + topic_id: str, + stream_arn: str, + consumer_arn: str, + aws_role_arn: str, + gcp_service_account: str, +) -> None: + """Create a new Pub/Sub topic with AWS Kinesis Ingestion Settings.""" + # [START pubsub_create_topic_with_kinesis_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # stream_arn = "your-stream-arn" + # consumer_arn = "your-consumer-arn" + # aws_role_arn = "your-aws-role-arn" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + aws_kinesis=IngestionDataSourceSettings.AwsKinesis( + stream_arn=stream_arn, + consumer_arn=consumer_arn, + aws_role_arn=aws_role_arn, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with AWS Kinesis Ingestion Settings") + # [END pubsub_create_topic_with_kinesis_ingestion] + + +def create_topic_with_cloud_storage_ingestion( + project_id: str, + topic_id: str, + bucket: str, + input_format: str, + text_delimiter: str, + match_glob: str, + minimum_object_create_time: str, +) -> None: + """Create a new Pub/Sub topic with Cloud Storage Ingestion Settings.""" + # [START pubsub_create_topic_with_cloud_storage_ingestion] + from google.cloud import 
pubsub_v1 + from google.protobuf import timestamp_pb2 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # bucket = "your-bucket" + # input_format = "text" (can be one of "text", "avro", "pubsub_avro") + # text_delimiter = "\n" + # match_glob = "**.txt" + # minimum_object_create_time = "YYYY-MM-DDThh:mm:ssZ" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + cloud_storage_settings = IngestionDataSourceSettings.CloudStorage( + bucket=bucket, + ) + if input_format == "text": + cloud_storage_settings.text_format = ( + IngestionDataSourceSettings.CloudStorage.TextFormat( + delimiter=text_delimiter + ) + ) + elif input_format == "avro": + cloud_storage_settings.avro_format = ( + IngestionDataSourceSettings.CloudStorage.AvroFormat() + ) + elif input_format == "pubsub_avro": + cloud_storage_settings.pubsub_avro_format = ( + IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat() + ) + else: + print( + "Invalid input_format: " + + input_format + + "; must be in ('text', 'avro', 'pubsub_avro')" + ) + return + + if match_glob: + cloud_storage_settings.match_glob = match_glob + + if minimum_object_create_time: + try: + minimum_object_create_time_timestamp = timestamp_pb2.Timestamp() + minimum_object_create_time_timestamp.FromJsonString( + minimum_object_create_time + ) + cloud_storage_settings.minimum_object_create_time = ( + minimum_object_create_time_timestamp + ) + except ValueError: + print("Invalid minimum_object_create_time: " + minimum_object_create_time) + return + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + cloud_storage=cloud_storage_settings, + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with Cloud Storage Ingestion Settings") + # [END pubsub_create_topic_with_cloud_storage_ingestion] + + +def create_topic_with_aws_msk_ingestion( + project_id: str, + topic_id: str, + cluster_arn: str, + msk_topic: str, + aws_role_arn: str, + gcp_service_account: str, +) -> None: + """Create a new Pub/Sub topic with AWS MSK Ingestion Settings.""" + # [START pubsub_create_topic_with_aws_msk_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # cluster_arn = "your-cluster-arn" + # msk_topic = "your-msk-topic" + # aws_role_arn = "your-aws-role-arn" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + aws_msk=IngestionDataSourceSettings.AwsMsk( + cluster_arn=cluster_arn, + topic=msk_topic, + aws_role_arn=aws_role_arn, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with AWS MSK Ingestion Settings") + # [END pubsub_create_topic_with_aws_msk_ingestion] + + +def create_topic_with_azure_event_hubs_ingestion( + project_id: str, + topic_id: str, + resource_group: str, + namespace: str, + event_hub: str, + client_id: str, + tenant_id: str, + subscription_id: str, + gcp_service_account: str, +) -> None: + """Create 
a new Pub/Sub topic with Azure Event Hubs Ingestion Settings.""" + # [START pubsub_create_topic_with_azure_event_hubs_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # resource_group = "your-resource-group" + # namespace = "your-namespace" + # event_hub = "your-event-hub" + # client_id = "your-client-id" + # tenant_id = "your-tenant-id" + # subscription_id = "your-subscription-id" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + azure_event_hubs=IngestionDataSourceSettings.AzureEventHubs( + resource_group=resource_group, + namespace=namespace, + event_hub=event_hub, + client_id=client_id, + tenant_id=tenant_id, + subscription_id=subscription_id, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with Azure Event Hubs Ingestion Settings") + # [END pubsub_create_topic_with_azure_event_hubs_ingestion] + + +def create_topic_with_confluent_cloud_ingestion( + project_id: str, + topic_id: str, + bootstrap_server: str, + cluster_id: str, + confluent_topic: str, + identity_pool_id: str, + gcp_service_account: str, +) -> None: + """Create a new Pub/Sub topic with Confluent Cloud Ingestion Settings.""" + # [START pubsub_create_topic_with_confluent_cloud_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # bootstrap_server = "your-bootstrap-server" + # cluster_id = "your-cluster-id" + # confluent_topic = "your-confluent-topic" + # identity_pool_id = "your-identity-pool-id" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + confluent_cloud=IngestionDataSourceSettings.ConfluentCloud( + bootstrap_server=bootstrap_server, + cluster_id=cluster_id, + topic=confluent_topic, + identity_pool_id=identity_pool_id, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with Confluent Cloud Ingestion Settings") + # [END pubsub_create_topic_with_confluent_cloud_ingestion] + + +def create_topic_with_smt( + project_id: str, + topic_id: str, +) -> None: + """Create a new Pub/Sub topic with a UDF SMT.""" + # [START pubsub_create_topic_with_smt] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import JavaScriptUDF, MessageTransform, Topic + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + code = """function redactSSN(message, metadata) { + const data = JSON.parse(message.data); + delete data['ssn']; + message.data = JSON.stringify(data); + return message; + }""" + udf = JavaScriptUDF(code=code, function_name="redactSSN") + transforms = [MessageTransform(javascript_udf=udf)] + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + 
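+    # Because the transform is attached at creation time, the redactSSN UDF
+    # runs on each message published to this topic before the message is
+    # delivered to subscribers.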
request = Topic(name=topic_path, message_transforms=transforms) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with SMT") + # [END pubsub_create_topic_with_smt] + + +def update_topic_type( + project_id: str, + topic_id: str, + stream_arn: str, + consumer_arn: str, + aws_role_arn: str, + gcp_service_account: str, +) -> None: + """Update Pub/Sub topic with AWS Kinesis Ingestion Settings.""" + # [START pubsub_update_topic_type] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + from google.pubsub_v1.types import UpdateTopicRequest + from google.protobuf import field_mask_pb2 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # stream_arn = "your-stream-arn" + # consumer_arn = "your-consumer-arn" + # aws_role_arn = "your-aws-role-arn" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + update_request = UpdateTopicRequest( + topic=Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + aws_kinesis=IngestionDataSourceSettings.AwsKinesis( + stream_arn=stream_arn, + consumer_arn=consumer_arn, + aws_role_arn=aws_role_arn, + gcp_service_account=gcp_service_account, + ) + ), + ), + update_mask=field_mask_pb2.FieldMask(paths=["ingestion_data_source_settings"]), + ) + + topic = publisher.update_topic(request=update_request) + print(f"Updated topic: {topic.name} with AWS Kinesis Ingestion Settings") + + +# [END pubsub_update_topic_type] + + +def delete_topic(project_id: str, topic_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_topic] from google.cloud import pubsub_v1 @@ -72,13 +420,90 @@ def delete_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - publisher.delete_topic(topic_path) + publisher.delete_topic(request={"topic": topic_path}) - print("Topic deleted: {}".format(topic_path)) + print(f"Topic deleted: {topic_path}") # [END pubsub_delete_topic] -def publish_messages(project_id, topic_id): +def pubsub_publish_otel_tracing( + topic_project_id: str, trace_project_id: str, topic_id: str +) -> None: + """ + Publish to `topic_id` in `topic_project_id` with OpenTelemetry enabled. + Export the OpenTelemetry traces to Google Cloud Trace in project + `trace_project_id` + + Args: + topic_project_id: project ID of the topic to publish to. + trace_project_id: project ID to export Cloud Trace to. + topic_id: topic ID to publish to. + + Returns: + None + """ + # [START pubsub_publish_otel_tracing] + + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, + ) + from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter + from opentelemetry.sdk.trace.sampling import TraceIdRatioBased, ParentBased + + from google.cloud.pubsub_v1 import PublisherClient + from google.cloud.pubsub_v1.types import PublisherOptions + + # TODO(developer) + # topic_project_id = "your-topic-project-id" + # trace_project_id = "your-trace-project-id" + # topic_id = "your-topic-id" + + # In this sample, we use Google Cloud Trace to export the OpenTelemetry + # traces: https://cloud.google.com/trace/docs/setup/python-ot + # Choose and configure the exporter for your setup accordingly.
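+    # For a local OpenTelemetry setup without Cloud Trace, the SDK's console
+    # exporter could be used instead, for example:
+    #
+    #     from opentelemetry.sdk.trace.export import ConsoleSpanExporter
+    #     trace.get_tracer_provider().add_span_processor(
+    #         BatchSpanProcessor(ConsoleSpanExporter())
+    #     )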
+
+    sampler = ParentBased(root=TraceIdRatioBased(1))
+    trace.set_tracer_provider(TracerProvider(sampler=sampler))
+
+    # Export to Google Trace.
+    cloud_trace_exporter = CloudTraceSpanExporter(
+        project_id=trace_project_id,
+    )
+    trace.get_tracer_provider().add_span_processor(
+        BatchSpanProcessor(cloud_trace_exporter)
+    )
+
+    # Set the `enable_open_telemetry_tracing` option to True when creating
+    # the publisher client. This is all the library needs in order to emit
+    # OpenTelemetry traces. However, where the traces are exported to must
+    # still be configured in your OpenTelemetry setup, as done above. See:
+    # https://opentelemetry.io/docs/languages/python/exporters/
+    publisher = PublisherClient(
+        publisher_options=PublisherOptions(
+            enable_open_telemetry_tracing=True,
+        ),
+    )
+
+    # The `topic_path` method creates a fully qualified identifier
+    # in the form `projects/{project_id}/topics/{topic_id}`
+    topic_path = publisher.topic_path(topic_project_id, topic_id)
+    # Publish messages.
+    for n in range(1, 10):
+        data_str = f"Message number {n}"
+        # Data must be a bytestring
+        data = data_str.encode("utf-8")
+        # When you publish a message, the client returns a future.
+        future = publisher.publish(topic_path, data)
+        print(future.result())
+
+    print(f"Published messages to {topic_path}.")
+
+    # [END pubsub_publish_otel_tracing]
+
+
+def publish_messages(project_id: str, topic_id: str) -> None:
     """Publishes multiple messages to a Pub/Sub topic."""
     # [START pubsub_quickstart_publisher]
     # [START pubsub_publish]
@@ -94,19 +519,19 @@ def publish_messages(project_id, topic_id):
     topic_path = publisher.topic_path(project_id, topic_id)
 
     for n in range(1, 10):
-        data = u"Message number {}".format(n)
+        data_str = f"Message number {n}"
         # Data must be a bytestring
-        data = data.encode("utf-8")
+        data = data_str.encode("utf-8")
         # When you publish a message, the client returns a future.
- future = publisher.publish(topic_path, data=data) + future = publisher.publish(topic_path, data) print(future.result()) - print("Published messages.") + print(f"Published messages to {topic_path}.") # [END pubsub_quickstart_publisher] # [END pubsub_publish] -def publish_messages_with_custom_attributes(project_id, topic_id): +def publish_messages_with_custom_attributes(project_id: str, topic_id: str) -> None: """Publishes multiple messages with custom attributes to a Pub/Sub topic.""" # [START pubsub_publish_custom_attributes] @@ -120,25 +545,25 @@ def publish_messages_with_custom_attributes(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data_str = f"Message number {n}" # Data must be a bytestring - data = data.encode("utf-8") + data = data_str.encode("utf-8") # Add two attributes, origin and username, to the message future = publisher.publish( topic_path, data, origin="python-sample", username="gcp" ) print(future.result()) - print("Published messages with custom attributes.") + print(f"Published messages with custom attributes to {topic_path}.") # [END pubsub_publish_custom_attributes] -def publish_messages_with_error_handler(project_id, topic_id): - # [START pubsub_publish_messages_error_handler] +def publish_messages_with_error_handler(project_id: str, topic_id: str) -> None: + # [START pubsub_publish_with_error_handler] """Publishes multiple messages to a Pub/Sub topic with an error handler.""" - import time - + from concurrent import futures from google.cloud import pubsub_v1 + from typing import Callable # TODO(developer) # project_id = "your-project-id" @@ -146,49 +571,47 @@ def publish_messages_with_error_handler(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) + publish_futures = [] - futures = dict() - - def get_callback(f, data): - def callback(f): + def get_callback( + publish_future: pubsub_v1.publisher.futures.Future, data: str + ) -> Callable[[pubsub_v1.publisher.futures.Future], None]: + def callback(publish_future: pubsub_v1.publisher.futures.Future) -> None: try: - print(f.result()) - futures.pop(data) - except: # noqa - print("Please handle {} for {}.".format(f.exception(), data)) + # Wait 60 seconds for the publish call to succeed. + print(publish_future.result(timeout=60)) + except futures.TimeoutError: + print(f"Publishing {data} timed out.") return callback for i in range(10): data = str(i) - futures.update({data: None}) # When you publish a message, the client returns a future. - future = publisher.publish( - topic_path, data=data.encode("utf-8") # data must be a bytestring. - ) - futures[data] = future - # Publish failures shall be handled in the callback function. - future.add_done_callback(get_callback(future, data)) + publish_future = publisher.publish(topic_path, data.encode("utf-8")) + # Non-blocking. Publish failures are handled in the callback function. + publish_future.add_done_callback(get_callback(publish_future, data)) + publish_futures.append(publish_future) # Wait for all the publish futures to resolve before exiting. 
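+    # ALL_COMPLETED also counts futures that failed as done, so this wait
+    # cannot hang on an individual publish error.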
-    while futures:
-        time.sleep(5)
+    futures.wait(publish_futures, return_when=futures.ALL_COMPLETED)
 
-    print("Published message with error handler.")
-    # [END pubsub_publish_messages_error_handler]
+    print(f"Published messages with error handler to {topic_path}.")
+    # [END pubsub_publish_with_error_handler]
 
 
-def publish_messages_with_batch_settings(project_id, topic_id):
+def publish_messages_with_batch_settings(project_id: str, topic_id: str) -> None:
     """Publishes multiple messages to a Pub/Sub topic with batch settings."""
     # [START pubsub_publisher_batch_settings]
+    from concurrent import futures
     from google.cloud import pubsub_v1
 
     # TODO(developer)
     # project_id = "your-project-id"
     # topic_id = "your-topic-id"
 
-    # Configure the batch to publish as soon as there is ten messages,
-    # one kilobyte of data, or one second has passed.
+    # Configure the batch to publish as soon as there are 10 messages
+    # or 1 KiB of data, or 1 second has passed.
     batch_settings = pubsub_v1.types.BatchSettings(
         max_messages=10,  # default 100
         max_bytes=1024,  # default 1 MB
@@ -196,86 +619,233 @@ def publish_messages_with_batch_settings(project_id, topic_id):
     )
     publisher = pubsub_v1.PublisherClient(batch_settings)
     topic_path = publisher.topic_path(project_id, topic_id)
+    publish_futures = []
 
     # Resolve the publish future in a separate thread.
-    def callback(future):
+    def callback(future: pubsub_v1.publisher.futures.Future) -> None:
         message_id = future.result()
         print(message_id)
 
     for n in range(1, 10):
-        data = u"Message number {}".format(n)
+        data_str = f"Message number {n}"
         # Data must be a bytestring
-        data = data.encode("utf-8")
-        future = publisher.publish(topic_path, data=data)
+        data = data_str.encode("utf-8")
+        publish_future = publisher.publish(topic_path, data)
         # Non-blocking. Allow the publisher client to batch multiple messages.
-        future.add_done_callback(callback)
+        publish_future.add_done_callback(callback)
+        publish_futures.append(publish_future)
 
-    print("Published messages with batch settings.")
+    futures.wait(publish_futures, return_when=futures.ALL_COMPLETED)
+
+    print(f"Published messages with batch settings to {topic_path}.")
     # [END pubsub_publisher_batch_settings]
 
 
-def publish_messages_with_retry_settings(project_id, topic_id):
+def publish_messages_with_flow_control_settings(project_id: str, topic_id: str) -> None:
+    """Publishes messages to a Pub/Sub topic with flow control settings."""
+    # [START pubsub_publisher_flow_control]
+    from concurrent import futures
+    from google.cloud import pubsub_v1
+    from google.cloud.pubsub_v1.types import (
+        LimitExceededBehavior,
+        PublisherOptions,
+        PublishFlowControl,
+    )
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+
+    # Configure how many messages the publisher client can hold in memory
+    # and what to do when messages exceed the limit.
+    flow_control_settings = PublishFlowControl(
+        message_limit=100,  # 100 messages
+        byte_limit=10 * 1024 * 1024,  # 10 MiB
+        limit_exceeded_behavior=LimitExceededBehavior.BLOCK,
+    )
+    publisher = pubsub_v1.PublisherClient(
+        publisher_options=PublisherOptions(flow_control=flow_control_settings)
+    )
+    topic_path = publisher.topic_path(project_id, topic_id)
+    publish_futures = []
+
+    # Resolve the publish future in a separate thread.
+    def callback(publish_future: pubsub_v1.publisher.futures.Future) -> None:
+        message_id = publish_future.result()
+        print(message_id)
+
+    # Publishing many messages in quick succession may be constrained by
+    # the publisher flow control settings configured above.
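+    # With LimitExceededBehavior.BLOCK, each publish() call below simply
+    # pauses once 100 messages or 10 MiB are outstanding, and resumes as
+    # earlier publishes complete.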
+
+    for n in range(1, 1000):
+        data_str = f"Message number {n}"
+        # Data must be a bytestring
+        data = data_str.encode("utf-8")
+        publish_future = publisher.publish(topic_path, data)
+        # Non-blocking. Allow the publisher client to batch messages.
+        publish_future.add_done_callback(callback)
+        publish_futures.append(publish_future)
+
+    futures.wait(publish_futures, return_when=futures.ALL_COMPLETED)
+
+    print(f"Published messages with flow control settings to {topic_path}.")
+    # [END pubsub_publisher_flow_control]
+
+
+def publish_messages_with_retry_settings(project_id: str, topic_id: str) -> None:
     """Publishes messages with custom retry settings."""
     # [START pubsub_publisher_retry_settings]
+    from google import api_core
     from google.cloud import pubsub_v1
 
     # TODO(developer)
     # project_id = "your-project-id"
     # topic_id = "your-topic-id"
 
-    # Configure the retry settings. Defaults will be overwritten.
-    retry_settings = {
-        "interfaces": {
-            "google.pubsub.v1.Publisher": {
-                "retry_codes": {
-                    "publish": [
-                        "ABORTED",
-                        "CANCELLED",
-                        "DEADLINE_EXCEEDED",
-                        "INTERNAL",
-                        "RESOURCE_EXHAUSTED",
-                        "UNAVAILABLE",
-                        "UNKNOWN",
-                    ]
-                },
-                "retry_params": {
-                    "messaging": {
-                        "initial_retry_delay_millis": 100,  # default: 100
-                        "retry_delay_multiplier": 1.3,  # default: 1.3
-                        "max_retry_delay_millis": 60000,  # default: 60000
-                        "initial_rpc_timeout_millis": 5000,  # default: 25000
-                        "rpc_timeout_multiplier": 1.0,  # default: 1.0
-                        "max_rpc_timeout_millis": 600000,  # default: 30000
-                        "total_timeout_millis": 600000,  # default: 600000
-                    }
-                },
-                "methods": {
-                    "Publish": {
-                        "retry_codes_name": "publish",
-                        "retry_params_name": "messaging",
-                    }
-                },
-            }
-        }
-    }
-
-    publisher = pubsub_v1.PublisherClient(client_config=retry_settings)
+    # Configure the retry settings. The defaults noted in the comments below
+    # are the values the client library applies by default, not the defaults
+    # of the Retry object itself.
+    custom_retry = api_core.retry.Retry(
+        initial=0.250,  # seconds (default: 0.1)
+        maximum=90.0,  # seconds (default: 60.0)
+        multiplier=1.45,  # default: 1.3
+        deadline=300.0,  # seconds (default: 60.0)
+        predicate=api_core.retry.if_exception_type(
+            api_core.exceptions.Aborted,
+            api_core.exceptions.DeadlineExceeded,
+            api_core.exceptions.InternalServerError,
+            api_core.exceptions.ResourceExhausted,
+            api_core.exceptions.ServiceUnavailable,
+            api_core.exceptions.Unknown,
+            api_core.exceptions.Cancelled,
+        ),
+    )
+
+    publisher = pubsub_v1.PublisherClient()
     topic_path = publisher.topic_path(project_id, topic_id)
 
     for n in range(1, 10):
-        data = u"Message number {}".format(n)
+        data_str = f"Message number {n}"
         # Data must be a bytestring
-        data = data.encode("utf-8")
-        future = publisher.publish(topic_path, data=data)
+        data = data_str.encode("utf-8")
+        future = publisher.publish(topic=topic_path, data=data, retry=custom_retry)
         print(future.result())
 
-    print("Published messages with retry settings.")
+    print(f"Published messages with retry settings to {topic_path}.")
     # [END pubsub_publisher_retry_settings]
 
 
-if __name__ == "__main__":
+def publish_with_ordering_keys(project_id: str, topic_id: str) -> None:
+    """Publishes messages with ordering keys."""
+    # [START pubsub_publish_with_ordering_keys]
+    from google.cloud import pubsub_v1
+
+    # TODO(developer): Choose an existing topic.
+ # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher_options = pubsub_v1.types.PublisherOptions(enable_message_ordering=True) + # Sending messages to the same region ensures they are received in order + # even when multiple publishers are used. + client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} + publisher = pubsub_v1.PublisherClient( + publisher_options=publisher_options, client_options=client_options + ) + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_id}` + topic_path = publisher.topic_path(project_id, topic_id) + + for message in [ + ("message1", "key1"), + ("message2", "key2"), + ("message3", "key1"), + ("message4", "key2"), + ]: + # Data must be a bytestring + data = message[0].encode("utf-8") + ordering_key = message[1] + # When you publish a message, the client returns a future. + future = publisher.publish(topic_path, data=data, ordering_key=ordering_key) + print(future.result()) + + print(f"Published messages with ordering keys to {topic_path}.") + # [END pubsub_publish_with_ordering_keys] + + +def resume_publish_with_ordering_keys(project_id: str, topic_id: str) -> None: + """Resume publishing messages with ordering keys when unrecoverable errors occur.""" + # [START pubsub_resume_publish_with_ordering_keys] + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing topic. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher_options = pubsub_v1.types.PublisherOptions(enable_message_ordering=True) + # Sending messages to the same region ensures they are received in order + # even when multiple publishers are used. + client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} + publisher = pubsub_v1.PublisherClient( + publisher_options=publisher_options, client_options=client_options + ) + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_id}` + topic_path = publisher.topic_path(project_id, topic_id) + + for message in [ + ("message1", "key1"), + ("message2", "key2"), + ("message3", "key1"), + ("message4", "key2"), + ]: + # Data must be a bytestring + data = message[0].encode("utf-8") + ordering_key = message[1] + # When you publish a message, the client returns a future. + future = publisher.publish(topic_path, data=data, ordering_key=ordering_key) + try: + print(future.result()) + except RuntimeError: + # Resume publish on an ordering key that has had unrecoverable errors. + publisher.resume_publish(topic_path, ordering_key) + + print(f"Resumed publishing messages with ordering keys to {topic_path}.") + # [END pubsub_resume_publish_with_ordering_keys] + + +def detach_subscription(project_id: str, subscription_id: str) -> None: + """Detaches a subscription from a topic and drops all messages retained in it.""" + # [START pubsub_detach_subscription] + from google.api_core.exceptions import GoogleAPICallError, RetryError + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing subscription. 
+ # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + publisher_client = pubsub_v1.PublisherClient() + subscriber_client = pubsub_v1.SubscriberClient() + subscription_path = subscriber_client.subscription_path(project_id, subscription_id) + + try: + publisher_client.detach_subscription( + request={"subscription": subscription_path} + ) + except (GoogleAPICallError, RetryError, ValueError, Exception) as err: + print(err) + + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) + if subscription.detached: + print(f"{subscription_path} is detached.") + else: + print(f"{subscription_path} is NOT detached.") + # [END pubsub_detach_subscription] + + +if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") @@ -285,6 +855,86 @@ def publish_messages_with_retry_settings(project_id, topic_id): create_parser = subparsers.add_parser("create", help=create_topic.__doc__) create_parser.add_argument("topic_id") + pubsub_publish_otel_tracing_parser = subparsers.add_parser( + "pubsub-publish-otel-tracing", help=pubsub_publish_otel_tracing.__doc__ + ) + pubsub_publish_otel_tracing_parser.add_argument("topic_project_id") + pubsub_publish_otel_tracing_parser.add_argument("trace_project_id") + pubsub_publish_otel_tracing_parser.add_argument("topic_id") + + create_topic_with_kinesis_ingestion_parser = subparsers.add_parser( + "create_kinesis_ingestion", help=create_topic_with_kinesis_ingestion.__doc__ + ) + create_topic_with_kinesis_ingestion_parser.add_argument("topic_id") + create_topic_with_kinesis_ingestion_parser.add_argument("stream_arn") + create_topic_with_kinesis_ingestion_parser.add_argument("consumer_arn") + create_topic_with_kinesis_ingestion_parser.add_argument("aws_role_arn") + create_topic_with_kinesis_ingestion_parser.add_argument("gcp_service_account") + + create_topic_with_cloud_storage_ingestion_parser = subparsers.add_parser( + "create_cloud_storage_ingestion", + help=create_topic_with_cloud_storage_ingestion.__doc__, + ) + create_topic_with_cloud_storage_ingestion_parser.add_argument("topic_id") + create_topic_with_cloud_storage_ingestion_parser.add_argument("bucket") + create_topic_with_cloud_storage_ingestion_parser.add_argument("input_format") + create_topic_with_cloud_storage_ingestion_parser.add_argument("text_delimiter") + create_topic_with_cloud_storage_ingestion_parser.add_argument("match_glob") + create_topic_with_cloud_storage_ingestion_parser.add_argument( + "minimum_object_create_time" + ) + + create_topic_with_aws_msk_ingestion_parser = subparsers.add_parser( + "create_aws_msk_ingestion", help=create_topic_with_aws_msk_ingestion.__doc__ + ) + create_topic_with_aws_msk_ingestion_parser.add_argument("topic_id") + create_topic_with_aws_msk_ingestion_parser.add_argument("cluster_arn") + create_topic_with_aws_msk_ingestion_parser.add_argument("msk_topic") + create_topic_with_aws_msk_ingestion_parser.add_argument("aws_role_arn") + create_topic_with_aws_msk_ingestion_parser.add_argument("gcp_service_account") + + create_topic_with_azure_event_hubs_ingestion_parser = subparsers.add_parser( + "create_azure_event_hubs_ingestion", + help=create_topic_with_azure_event_hubs_ingestion.__doc__, + ) + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("topic_id") 
+ create_topic_with_azure_event_hubs_ingestion_parser.add_argument("resource_group") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("namespace") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("event_hub") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("client_id") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("tenant_id") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("subscription_id") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument( + "gcp_service_account" + ) + + create_topic_with_confluent_cloud_ingestion_parser = subparsers.add_parser( + "create_confluent_cloud_ingestion", + help=create_topic_with_confluent_cloud_ingestion.__doc__, + ) + create_topic_with_confluent_cloud_ingestion_parser.add_argument("topic_id") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("bootstrap_server") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("cluster_id") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("confluent_topic") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("identity_pool_id") + create_topic_with_confluent_cloud_ingestion_parser.add_argument( + "gcp_service_account" + ) + + create_parser = subparsers.add_parser( + "create_smt", help=create_topic_with_smt.__doc__ + ) + create_parser.add_argument("topic_id") + + update_topic_type_parser = subparsers.add_parser( + "update_kinesis_ingestion", help=update_topic_type.__doc__ + ) + update_topic_type_parser.add_argument("topic_id") + update_topic_type_parser.add_argument("stream_arn") + update_topic_type_parser.add_argument("consumer_arn") + update_topic_type_parser.add_argument("aws_role_arn") + update_topic_type_parser.add_argument("gcp_service_account") + delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) delete_parser.add_argument("topic_id") @@ -298,7 +948,8 @@ def publish_messages_with_retry_settings(project_id, topic_id): publish_with_custom_attributes_parser.add_argument("topic_id") publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, + "publish-with-error-handler", + help=publish_messages_with_error_handler.__doc__, ) publish_with_error_handler_parser.add_argument("topic_id") @@ -308,18 +959,106 @@ def publish_messages_with_retry_settings(project_id, topic_id): ) publish_with_batch_settings_parser.add_argument("topic_id") + publish_with_flow_control_settings_parser = subparsers.add_parser( + "publish-with-flow-control", + help=publish_messages_with_flow_control_settings.__doc__, + ) + publish_with_flow_control_settings_parser.add_argument("topic_id") + publish_with_retry_settings_parser = subparsers.add_parser( "publish-with-retry-settings", help=publish_messages_with_retry_settings.__doc__, ) publish_with_retry_settings_parser.add_argument("topic_id") + publish_with_ordering_keys_parser = subparsers.add_parser( + "publish-with-ordering-keys", + help=publish_with_ordering_keys.__doc__, + ) + publish_with_ordering_keys_parser.add_argument("topic_id") + + resume_publish_with_ordering_keys_parser = subparsers.add_parser( + "resume-publish-with-ordering-keys", + help=resume_publish_with_ordering_keys.__doc__, + ) + resume_publish_with_ordering_keys_parser.add_argument("topic_id") + + detach_subscription_parser = subparsers.add_parser( + "detach-subscription", + help=detach_subscription.__doc__, + ) + 
detach_subscription_parser.add_argument("subscription_id") + args = parser.parse_args() if args.command == "list": list_topics(args.project_id) elif args.command == "create": create_topic(args.project_id, args.topic_id) + elif args.command == "create_kinesis_ingestion": + create_topic_with_kinesis_ingestion( + args.project_id, + args.topic_id, + args.stream_arn, + args.consumer_arn, + args.aws_role_arn, + args.gcp_service_account, + ) + elif args.command == "create_cloud_storage_ingestion": + create_topic_with_cloud_storage_ingestion( + args.project_id, + args.topic_id, + args.bucket, + args.input_format, + args.text_delimiter, + args.match_glob, + args.minimum_object_create_time, + ) + elif args.command == "create_aws_msk_ingestion": + create_topic_with_aws_msk_ingestion( + args.project_id, + args.topic_id, + args.cluster_arn, + args.msk_topic, + args.aws_role_arn, + args.gcp_service_account, + ) + elif args.command == "create_azure_event_hubs_ingestion": + create_topic_with_azure_event_hubs_ingestion( + args.project_id, + args.topic_id, + args.resource_group, + args.namespace, + args.event_hub, + args.client_id, + args.tenant_id, + args.subscription_id, + args.gcp_service_account, + ) + elif args.command == "create_confluent_cloud_ingestion": + create_topic_with_confluent_cloud_ingestion( + args.project_id, + args.topic_id, + args.bootstrap_server, + args.cluster_id, + args.confluent_topic, + args.identity_pool_id, + args.gcp_service_account, + ) + elif args.command == "create_smt": + create_topic_with_smt( + args.project_id, + args.topic_id, + ) + elif args.command == "update_kinesis_ingestion": + update_topic_type( + args.project_id, + args.topic_id, + args.stream_arn, + args.consumer_arn, + args.aws_role_arn, + args.gcp_service_account, + ) elif args.command == "delete": delete_topic(args.project_id, args.topic_id) elif args.command == "publish": @@ -330,5 +1069,17 @@ def publish_messages_with_retry_settings(project_id, topic_id): publish_messages_with_error_handler(args.project_id, args.topic_id) elif args.command == "publish-with-batch-settings": publish_messages_with_batch_settings(args.project_id, args.topic_id) + elif args.command == "publish-with-flow-control": + publish_messages_with_flow_control_settings(args.project_id, args.topic_id) elif args.command == "publish-with-retry-settings": publish_messages_with_retry_settings(args.project_id, args.topic_id) + elif args.command == "publish-with-ordering-keys": + publish_with_ordering_keys(args.project_id, args.topic_id) + elif args.command == "resume-publish-with-ordering-keys": + resume_publish_with_ordering_keys(args.project_id, args.topic_id) + elif args.command == "detach-subscription": + detach_subscription(args.project_id, args.subscription_id) + elif args.command == "pubsub-publish-otel-tracing": + pubsub_publish_otel_tracing( + args.topic_project_id, args.trace_project_id, args.topic_id + ) diff --git a/samples/snippets/publisher_test.py b/samples/snippets/publisher_test.py index b5c2ea1ea..1c691bd5c 100644 --- a/samples/snippets/publisher_test.py +++ b/samples/snippets/publisher_test.py @@ -14,57 +14,90 @@ import os import time +import typing +from typing import Any, Callable, cast, Iterator, TypeVar, Union import uuid +from _pytest.capture import CaptureFixture import backoff +from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 import mock import pytest import publisher + +# This uuid is shared across tests which run in parallel. 
UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC_ADMIN = "publisher-test-topic-admin-" + UUID -TOPIC_PUBLISH = "publisher-test-topic-publish-" + UUID +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC_ID = "publisher-test-topic-" + UUID +SUBSCRIPTION_ID = "publisher-test-subscription-" + UUID +# Allow 60s for tests to finish. +MAX_TIME = 60 +# These tests run in parallel if pytest-parallel is installed. +# Avoid modifying resources that are shared across tests, +# as this results in test flake. -@pytest.fixture -def client(): - yield pubsub_v1.PublisherClient() +if typing.TYPE_CHECKING: + from unittest.mock import AsyncMock, MagicMock + MockType = Union[MagicMock, AsyncMock] -@pytest.fixture -def topic_admin(client): - topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) - try: - topic = client.get_topic(topic_path) - except: # noqa - topic = client.create_topic(topic_path) +@pytest.fixture(scope="module") +def publisher_client() -> Iterator[pubsub_v1.PublisherClient]: + yield pubsub_v1.PublisherClient() - yield topic.name - # Teardown of `topic_admin` is handled in `test_delete()`. + +@pytest.fixture(scope="module") +def subscriber_client() -> Iterator[pubsub_v1.SubscriberClient]: + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + # Close the subscriber client properly during teardown. + subscriber_client.close() -@pytest.fixture -def topic_publish(client): - topic_path = client.topic_path(PROJECT, TOPIC_PUBLISH) +@pytest.fixture(scope="module") +def topic_path(publisher_client: pubsub_v1.PublisherClient) -> Iterator[str]: + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: - topic = client.get_topic(topic_path) - except: # noqa - topic = client.create_topic(topic_path) + topic = publisher_client.get_topic(request={"topic": topic_path}) + except NotFound: + topic = publisher_client.create_topic(request={"name": topic_path}) yield topic.name - client.delete_topic(topic.name) + try: + publisher_client.delete_topic(request={"topic": topic.name}) + except NotFound: + pass + +@pytest.fixture(scope="module") +def subscription_path( + subscriber_client: pubsub_v1.SubscriberClient, topic_path: str +) -> Iterator[str]: + subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) + subscription = subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + yield subscription.name -def _make_sleep_patch(): + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + +def _make_sleep_patch() -> 'mock.mock._patch["MockType"]': real_sleep = time.sleep - def new_sleep(period): + def new_sleep(period: float) -> None: if period == 60: real_sleep(5) raise RuntimeError("sigil") @@ -74,73 +107,373 @@ def new_sleep(period): return mock.patch("time.sleep", new=new_sleep) -def test_list(client, topic_admin, capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - publisher.list_topics(PROJECT) - out, _ = capsys.readouterr() - assert topic_admin in out +def test_create( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. 
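+    # (A plain local variable is used instead of the module-scoped
+    # `topic_path` fixture because this test deletes and recreates the
+    # topic itself.)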
+ topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path}" in out + + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) - eventually_consistent_test() +def test_create_topic_with_kinesis_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + # Outside of automated CI tests, these values must be of actual AWS resources for the test to pass. + stream_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name" + consumer_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name/consumer/consumer-1:1111111111" + aws_role_arn = "arn:aws:iam::111111111111:role/fake-role-name" + gcp_service_account = ( + "fake-service-account@fake-gcp-project.iam.gserviceaccount.com" + ) -def test_create(client): - topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: - client.delete_topic(topic_path) - except Exception: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: pass - publisher.create_topic(PROJECT, TOPIC_ADMIN) + publisher.create_topic_with_kinesis_ingestion( + PROJECT_ID, + TOPIC_ID, + stream_arn, + consumer_arn, + aws_role_arn, + gcp_service_account, + ) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert client.get_topic(topic_path) + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with AWS Kinesis Ingestion Settings" in out - eventually_consistent_test() + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) -def test_delete(client, topic_admin): - publisher.delete_topic(PROJECT, TOPIC_ADMIN) +def test_create_topic_with_cloud_storage_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - with pytest.raises(Exception): - client.get_topic(client.topic_path(PROJECT, TOPIC_ADMIN)) + bucket = "pubsub-cloud-storage-bucket" + input_format = "text" + text_delimiter = "," + match_glob = "**.txt" + minimum_object_create_time = "1970-01-01T00:00:01Z" - eventually_consistent_test() + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_with_cloud_storage_ingestion( + PROJECT_ID, + TOPIC_ID, + bucket, + input_format, + text_delimiter, + match_glob, + minimum_object_create_time, + ) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with Cloud Storage Ingestion Settings" in out + + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) + + +def test_create_topic_with_aws_msk_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. 
+    topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID)
+
+    # Outside of automated CI tests, these values must be of actual AWS resources for the test to pass.
+    cluster_arn = (
+        "arn:aws:kafka:us-east-1:111111111111:cluster/fake-cluster-name/11111111-1111-1"
+    )
+    msk_topic = "fake-msk-topic-name"
+    aws_role_arn = "arn:aws:iam::111111111111:role/fake-role-name"
+    gcp_service_account = (
+        "fake-service-account@fake-gcp-project.iam.gserviceaccount.com"
+    )
+
+    try:
+        publisher_client.delete_topic(request={"topic": topic_path})
+    except NotFound:
+        pass
+
+    publisher.create_topic_with_aws_msk_ingestion(
+        PROJECT_ID,
+        TOPIC_ID,
+        cluster_arn,
+        msk_topic,
+        aws_role_arn,
+        gcp_service_account,
+    )
+
+    out, _ = capsys.readouterr()
+    assert f"Created topic: {topic_path} with AWS MSK Ingestion Settings" in out
+
+    # Clean up resource created for the test.
+    publisher_client.delete_topic(request={"topic": topic_path})
+
+
+def test_create_topic_with_azure_event_hubs_ingestion(
+    publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str]
+) -> None:
+    # The scope of `topic_path` is limited to this function.
+    topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID)
+
+    # Outside of automated CI tests, these values must be of actual Azure resources for the test to pass.
+    resource_group = "fake-resource-group"
+    namespace = "fake-namespace"
+    event_hub = "fake-event-hub"
+    client_id = "fake-client-id"
+    tenant_id = "fake-tenant-id"
+    subscription_id = "fake-subscription-id"
+    gcp_service_account = (
+        "fake-service-account@fake-gcp-project.iam.gserviceaccount.com"
+    )
+
+    try:
+        publisher_client.delete_topic(request={"topic": topic_path})
+    except NotFound:
+        pass
+
+    publisher.create_topic_with_azure_event_hubs_ingestion(
+        PROJECT_ID,
+        TOPIC_ID,
+        resource_group,
+        namespace,
+        event_hub,
+        client_id,
+        tenant_id,
+        subscription_id,
+        gcp_service_account,
+    )
+
+    out, _ = capsys.readouterr()
+    assert (
+        f"Created topic: {topic_path} with Azure Event Hubs Ingestion Settings" in out
+    )
+
+    # Clean up resource created for the test.
+    publisher_client.delete_topic(request={"topic": topic_path})
+
+
+def test_create_topic_with_confluent_cloud_ingestion(
+    publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str]
+) -> None:
+    # The scope of `topic_path` is limited to this function.
+    topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID)
+
+    # Outside of automated CI tests, these values must be of actual Confluent resources for the test to pass.
+    bootstrap_server = "fake-bootstrap-server-id.us-south1.gcp.confluent.cloud:9092"
+    cluster_id = "fake-cluster-id"
+    confluent_topic = "fake-confluent-topic-name"
+    identity_pool_id = "fake-identity-pool-id"
+    gcp_service_account = (
+        "fake-service-account@fake-gcp-project.iam.gserviceaccount.com"
+    )
+
+    try:
+        publisher_client.delete_topic(request={"topic": topic_path})
+    except NotFound:
+        pass
+
+    publisher.create_topic_with_confluent_cloud_ingestion(
+        PROJECT_ID,
+        TOPIC_ID,
+        bootstrap_server,
+        cluster_id,
+        confluent_topic,
+        identity_pool_id,
+        gcp_service_account,
+    )
+
+    out, _ = capsys.readouterr()
+    assert f"Created topic: {topic_path} with Confluent Cloud Ingestion Settings" in out
+
+    # Clean up resource created for the test.
+    publisher_client.delete_topic(request={"topic": topic_path})
+
+
+def test_create_with_smt(
+    publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str]
+) -> None:
+    # The scope of `topic_path` is limited to this function.
+ topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_with_smt(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with SMT" in out + + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) + + +def test_update_topic_type( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + # Outside of automated CI tests, these values must be of actual AWS resources for the test to pass. + stream_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name" + consumer_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name/consumer/consumer-1:1111111111" + aws_role_arn = "arn:aws:iam::111111111111:role/fake-role-name" + gcp_service_account = ( + "fake-service-account@fake-gcp-project.iam.gserviceaccount.com" + ) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path}" in out + + publisher.update_topic_type( + PROJECT_ID, + TOPIC_ID, + stream_arn, + consumer_arn, + aws_role_arn, + gcp_service_account, + ) + + out, _ = capsys.readouterr() + assert f"Updated topic: {topic_path} with AWS Kinesis Ingestion Settings" in out + + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) -def test_publish(topic_publish, capsys): - publisher.publish_messages(PROJECT, TOPIC_PUBLISH) +def test_list(topic_path: str, capsys: CaptureFixture[str]) -> None: + publisher.list_topics(PROJECT_ID) + out, _ = capsys.readouterr() + + assert topic_path in out + + +def test_publish(topic_path: str, capsys: CaptureFixture[str]) -> None: + publisher.publish_messages(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Published messages to {topic_path}." in out + + +def test_publish_with_custom_attributes( + topic_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.publish_messages_with_custom_attributes(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Published messages with custom attributes to {topic_path}." in out + + +def test_publish_with_batch_settings( + topic_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.publish_messages_with_batch_settings(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Published messages with batch settings to {topic_path}." in out + + +def test_publish_with_flow_control_settings( + topic_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.publish_messages_with_flow_control_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with flow control settings to {topic_path}." in out -def test_publish_with_custom_attributes(topic_publish, capsys): - publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC_PUBLISH) +def test_publish_with_retry_settings( + topic_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.publish_messages_with_retry_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with retry settings to {topic_path}." 
in out -def test_publish_with_batch_settings(topic_publish, capsys): - publisher.publish_messages_with_batch_settings(PROJECT, TOPIC_PUBLISH) +def test_publish_with_error_handler( + topic_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.publish_messages_with_error_handler(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with error handler to {topic_path}." in out -def test_publish_with_retry_settings(topic_publish, capsys): - publisher.publish_messages_with_retry_settings(PROJECT, TOPIC_PUBLISH) +def test_publish_with_ordering_keys( + topic_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with ordering keys to {topic_path}." in out -def test_publish_with_error_handler(topic_publish, capsys): - publisher.publish_messages_with_error_handler(PROJECT, TOPIC_PUBLISH) +def test_resume_publish_with_error_handler( + topic_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.resume_publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Resumed publishing messages with ordering keys to {topic_path}." in out + + +def test_detach_subscription( + subscription_path: str, capsys: CaptureFixture[str] +) -> None: + publisher.detach_subscription(PROJECT_ID, SUBSCRIPTION_ID) + + out, _ = capsys.readouterr() + assert f"{subscription_path} is detached." in out + + +def test_delete(publisher_client: pubsub_v1.PublisherClient) -> None: + publisher.delete_topic(PROJECT_ID, TOPIC_ID) + + C = TypeVar("C", bound=Callable[..., Any]) + + typed_backoff = cast( + Callable[[C], C], + backoff.on_exception(backoff.expo, AssertionError, max_time=MAX_TIME), + ) + + @typed_backoff + def eventually_consistent_test() -> None: + with pytest.raises(Exception): + publisher_client.get_topic( + request={"topic": publisher_client.topic_path(PROJECT_ID, TOPIC_ID)} + ) + + eventually_consistent_test() diff --git a/samples/snippets/quickstart/pub.py b/samples/snippets/quickstart/pub.py index 16432c0c3..7215abd86 100644 --- a/samples/snippets/quickstart/pub.py +++ b/samples/snippets/quickstart/pub.py @@ -14,63 +14,26 @@ # See the License for the specific language governing permissions and # limitations under the License. -# [START pubsub_quickstart_pub_all] import argparse -import time -# [START pubsub_quickstart_pub_deps] from google.cloud import pubsub_v1 -# [END pubsub_quickstart_pub_deps] - -def get_callback(api_future, data, ref): - """Wrap message data in the context of the callback function.""" - - def callback(api_future): - try: - print( - "Published message {} now has message ID {}".format( - data, api_future.result() - ) - ) - ref["num_messages"] += 1 - except Exception: - print( - "A problem occurred when publishing {}: {}\n".format( - data, api_future.exception() - ) - ) - raise - - return callback - - -def pub(project_id, topic_id): +def pub(project_id: str, topic_id: str) -> None: """Publishes a message to a Pub/Sub topic.""" - # [START pubsub_quickstart_pub_client] # Initialize a Publisher client. 
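+    # The client below picks up credentials via Application Default
+    # Credentials; locally you can run, e.g.,
+    # `gcloud auth application-default login` first.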
 client = pubsub_v1.PublisherClient()
-    # [END pubsub_quickstart_pub_client]
 
-    # Create a fully qualified identifier in the form of
-    # `projects/{project_id}/topics/{topic_id}`
+    # Create a fully qualified identifier in the form `projects/{project_id}/topics/{topic_id}`
     topic_path = client.topic_path(project_id, topic_id)
 
     # Data sent to Cloud Pub/Sub must be a bytestring.
     data = b"Hello, World!"
 
-    # Keep track of the number of published messages.
-    ref = dict({"num_messages": 0})
-    # When you publish a message, the client returns a future.
-    api_future = client.publish(topic_path, data=data)
-    api_future.add_done_callback(get_callback(api_future, data, ref))
+    api_future = client.publish(topic_path, data)
+    message_id = api_future.result()
 
-    # Keep the main thread from exiting while the message future
-    # gets resolved in the background.
-    while api_future.running():
-        time.sleep(0.5)
-    print("Published {} message(s).".format(ref["num_messages"]))
+    print(f"Published {data.decode()} to {topic_path}: {message_id}")
 
 
 if __name__ == "__main__":
@@ -83,4 +46,3 @@ def pub(project_id, topic_id):
 
     args = parser.parse_args()
     pub(args.project_id, args.topic_id)
-# [END pubsub_quickstart_pub_all]
diff --git a/samples/snippets/quickstart/pub_test.py b/samples/snippets/quickstart/pub_test.py
deleted file mode 100644
index 6f5cc06c4..000000000
--- a/samples/snippets/quickstart/pub_test.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import uuid
-
-from google.api_core.exceptions import AlreadyExists
-from google.cloud import pubsub_v1
-import pytest
-
-import pub  # noqa
-
-
-UUID = uuid.uuid4().hex
-PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
-TOPIC = "quickstart-pub-test-topic-" + UUID
-
-
-@pytest.fixture(scope="module")
-def publisher_client():
-    yield pubsub_v1.PublisherClient()
-
-
-@pytest.fixture(scope="module")
-def topic(publisher_client):
-    topic_path = publisher_client.topic_path(PROJECT, TOPIC)
-
-    try:
-        publisher_client.create_topic(topic_path)
-    except AlreadyExists:
-        pass
-
-    yield TOPIC
-
-    publisher_client.delete_topic(topic_path)
-
-
-def test_pub(publisher_client, topic, capsys):
-    pub.pub(PROJECT, topic)
-
-    out, _ = capsys.readouterr()
-
-    assert "Hello, World!" in out
diff --git a/samples/snippets/quickstart/quickstart_test.py b/samples/snippets/quickstart/quickstart_test.py
new file mode 100644
index 000000000..3ed07cf81
--- /dev/null
+++ b/samples/snippets/quickstart/quickstart_test.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +from typing import Any, Callable, cast, Iterator, TypeVar +import uuid + +from _pytest.capture import CaptureFixture +from flaky import flaky +from google.api_core.exceptions import AlreadyExists +from google.cloud import pubsub_v1 +import pytest + + +UUID = uuid.uuid4().hex +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC_ID = "quickstart-sub-test-topic-" + UUID +SUBSCRIPTION_ID = "quickstart-sub-test-topic-sub-" + UUID + + +@pytest.fixture(scope="module") +def publisher_client() -> Iterator[pubsub_v1.PublisherClient]: + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope="module") +def subscriber_client() -> Iterator[pubsub_v1.SubscriberClient]: + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + subscriber_client.close() + + +@pytest.fixture(scope="module") +def topic_path(publisher_client: pubsub_v1.PublisherClient) -> Iterator[str]: + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + try: + topic = publisher_client.create_topic(request={"name": topic_path}) + yield topic.name + except AlreadyExists: + yield topic_path + + publisher_client.delete_topic(request={"topic": topic_path}) + + +@pytest.fixture(scope="module") +def subscription_path( + subscriber_client: pubsub_v1.SubscriberClient, topic_path: str +) -> Iterator[str]: + subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) + + try: + subscription = subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + yield subscription.name + except AlreadyExists: + yield subscription_path + + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + subscriber_client.close() + + +def test_pub(topic_path: str, capsys: CaptureFixture[str]) -> None: + import pub + + pub.pub(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert topic_path in out + assert "Hello, World!" in out + + +C = TypeVar("C", bound=Callable[..., Any]) +_typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) + + +@_typed_flaky +def test_sub( + publisher_client: pubsub_v1.PublisherClient, + topic_path: str, + subscription_path: str, + capsys: CaptureFixture[str], +) -> None: + publisher_client.publish(topic_path, b"Hello World!") + + import sub + + sub.sub(PROJECT_ID, SUBSCRIPTION_ID, 10) + + out, _ = capsys.readouterr() + assert f"Listening for messages on {subscription_path}" in out + assert "Received" in out + assert "Acknowledged" in out diff --git a/samples/snippets/quickstart/sub.py b/samples/snippets/quickstart/sub.py index efe008915..fd99aac2d 100644 --- a/samples/snippets/quickstart/sub.py +++ b/samples/snippets/quickstart/sub.py @@ -14,44 +14,38 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# [START pubsub_quickstart_sub_all]
 import argparse
+from typing import Optional
 
-# [START pubsub_quickstart_sub_deps]
 from google.cloud import pubsub_v1
-# [END pubsub_quickstart_sub_deps]
-
 
-def sub(project_id, subscription_id):
+def sub(project_id: str, subscription_id: str, timeout: Optional[float] = None) -> None:
     """Receives messages from a Pub/Sub subscription."""
-    # [START pubsub_quickstart_sub_client]
     # Initialize a Subscriber client
     subscriber_client = pubsub_v1.SubscriberClient()
-    # [END pubsub_quickstart_sub_client]
 
     # Create a fully qualified identifier in the form of
     # `projects/{project_id}/subscriptions/{subscription_id}`
     subscription_path = subscriber_client.subscription_path(project_id, subscription_id)
 
-    def callback(message):
-        print(
-            "Received message {} of message ID {}\n".format(message, message.message_id)
-        )
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message}.")
         # Acknowledge the message. Unack'ed messages will be redelivered.
         message.ack()
-        print("Acknowledged message {}\n".format(message.message_id))
+        print(f"Acknowledged {message.message_id}.")
 
     streaming_pull_future = subscriber_client.subscribe(
         subscription_path, callback=callback
     )
-    print("Listening for messages on {}..\n".format(subscription_path))
+    print(f"Listening for messages on {subscription_path}...\n")
 
     try:
         # Calling result() on StreamingPullFuture keeps the main thread from
         # exiting while messages get processed in the callbacks.
-        streaming_pull_future.result()
+        streaming_pull_future.result(timeout=timeout)
     except:  # noqa
-        streaming_pull_future.cancel()
+        streaming_pull_future.cancel()  # Trigger the shutdown.
+        streaming_pull_future.result()  # Block until the shutdown is complete.
 
     subscriber_client.close()
 
@@ -62,8 +56,10 @@ def callback(message):
     )
     parser.add_argument("project_id", help="Google Cloud project ID")
     parser.add_argument("subscription_id", help="Pub/Sub subscription ID")
+    parser.add_argument(
+        "timeout", default=None, type=float, nargs="?", const=1, help="StreamingPull timeout in seconds"
+    )
 
     args = parser.parse_args()
 
-    sub(args.project_id, args.subscription_id)
-# [END pubsub_quickstart_sub_all]
+    sub(args.project_id, args.subscription_id, args.timeout)
diff --git a/samples/snippets/quickstart/sub_test.py b/samples/snippets/quickstart/sub_test.py
deleted file mode 100644
index 38047422a..000000000
--- a/samples/snippets/quickstart/sub_test.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import os
-import uuid
-
-from google.api_core.exceptions import AlreadyExists
-from google.cloud import pubsub_v1
-import mock
-import pytest
-
-import sub  # noqa
-
-
-UUID = uuid.uuid4().hex
-PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
-TOPIC = "quickstart-sub-test-topic-" + UUID
-SUBSCRIPTION = "quickstart-sub-test-topic-sub-" + UUID
-
-publisher_client = pubsub_v1.PublisherClient()
-subscriber_client = pubsub_v1.SubscriberClient()
-
-
-@pytest.fixture(scope="module")
-def topic_path():
-    topic_path = publisher_client.topic_path(PROJECT, TOPIC)
-
-    try:
-        topic = publisher_client.create_topic(topic_path)
-        yield topic.name
-    except AlreadyExists:
-        yield topic_path
-
-    publisher_client.delete_topic(topic_path)
-
-
-@pytest.fixture(scope="module")
-def subscription_path(topic_path):
-    subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION)
-
-    try:
-        subscription = subscriber_client.create_subscription(
-            subscription_path, topic_path
-        )
-        yield subscription.name
-    except AlreadyExists:
-        yield subscription_path
-
-    subscriber_client.delete_subscription(subscription_path)
-    subscriber_client.close()
-
-
-def _publish_messages(topic_path):
-    publish_future = publisher_client.publish(topic_path, data=b"Hello World!")
-    publish_future.result()
-
-
-def test_sub(monkeypatch, topic_path, subscription_path, capsys):
-
-    real_client = pubsub_v1.SubscriberClient()
-    mock_client = mock.Mock(spec=pubsub_v1.SubscriberClient, wraps=real_client)
-
-    # Attributes on mock_client_constructor uses the corresponding
-    # attributes on pubsub_v1.SubscriberClient.
-    mock_client_constructor = mock.create_autospec(pubsub_v1.SubscriberClient)
-    mock_client_constructor.return_value = mock_client
-
-    monkeypatch.setattr(pubsub_v1, "SubscriberClient", mock_client_constructor)
-
-    def mock_subscribe(subscription_path, callback=None):
-        real_future = real_client.subscribe(subscription_path, callback=callback)
-        mock_future = mock.Mock(spec=real_future, wraps=real_future)
-
-        def mock_result():
-            return real_future.result(timeout=10)
-
-        mock_future.result.side_effect = mock_result
-        return mock_future
-
-    mock_client.subscribe.side_effect = mock_subscribe
-
-    _publish_messages(topic_path)
-
-    sub.sub(PROJECT, SUBSCRIPTION)
-
-    out, _ = capsys.readouterr()
-    assert "Received message" in out
-    assert "Acknowledged message" in out
-
-    real_client.close()
diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt
index adf26b9f9..7659e3676 100644
--- a/samples/snippets/requirements-test.txt
+++ b/samples/snippets/requirements-test.txt
@@ -1,3 +1,9 @@
-backoff==1.10.0
-pytest==5.3.2
-mock==3.0.5
+backoff==2.2.1
+pytest===7.4.4; python_version == '3.7'
+pytest===8.3.5; python_version == '3.8'
+pytest==8.4.2; python_version >= '3.9'
+mock==5.2.0
+flaky==3.8.1
+google-cloud-bigquery===3.30.0; python_version <= '3.8'
+google-cloud-bigquery==3.38.0; python_version >= '3.9'
+google-cloud-storage==3.4.0
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index 42ab449b1..63a78cd67 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1 +1,12 @@
-google-cloud-pubsub==1.6.1
+google-cloud-pubsub==2.31.1
+avro==1.12.0
+protobuf===4.24.4; python_version == '3.7'
+protobuf===5.29.4; python_version == '3.8'
+protobuf==6.32.1; python_version >= '3.9'
+opentelemetry-api===1.22.0; python_version == '3.7'
+opentelemetry-sdk===1.22.0; python_version == '3.7'
+opentelemetry-api===1.33.1; 
python_version == '3.8' +opentelemetry-sdk===1.33.1; python_version == '3.8' +opentelemetry-api==1.37.0; python_version >= '3.9' +opentelemetry-sdk==1.37.0; python_version >= '3.9' +opentelemetry-exporter-gcp-trace==1.9.0 diff --git a/samples/snippets/resources/us-states-plus.avsc b/samples/snippets/resources/us-states-plus.avsc new file mode 100644 index 000000000..74225ae7e --- /dev/null +++ b/samples/snippets/resources/us-states-plus.avsc @@ -0,0 +1,24 @@ +{ + "type":"record", + "name":"State", + "namespace":"utilities", + "doc":"A list of states in the United States of America.", + "fields":[ + { + "name":"name", + "type":"string", + "doc":"The common name of the state." + }, + { + "name":"post_abbr", + "type":"string", + "doc":"The postal code abbreviation of the state." + }, + { + "name":"population", + "type":"long", + "default":0, + "doc":"The population of the state." + } + ] +} diff --git a/samples/snippets/resources/us-states-plus.proto b/samples/snippets/resources/us-states-plus.proto new file mode 100644 index 000000000..9f845d9f4 --- /dev/null +++ b/samples/snippets/resources/us-states-plus.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package utilities; + +message StateProto { + string name = 1; + string post_abbr = 2; + int64 population = 3; +} diff --git a/samples/snippets/resources/us-states.avsc b/samples/snippets/resources/us-states.avsc new file mode 100644 index 000000000..7521882c7 --- /dev/null +++ b/samples/snippets/resources/us-states.avsc @@ -0,0 +1,18 @@ +{ + "type":"record", + "name":"State", + "namespace":"utilities", + "doc":"A list of states in the United States of America.", + "fields":[ + { + "name":"name", + "type":"string", + "doc":"The common name of the state." + }, + { + "name":"post_abbr", + "type":"string", + "doc":"The postal code abbreviation of the state." + } + ] +} \ No newline at end of file diff --git a/samples/snippets/resources/us-states.proto b/samples/snippets/resources/us-states.proto new file mode 100644 index 000000000..576c2ea1c --- /dev/null +++ b/samples/snippets/resources/us-states.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +package utilities; + +message StateProto { + string name = 1; + string post_abbr = 2; +} \ No newline at end of file diff --git a/samples/snippets/schema.py b/samples/snippets/schema.py new file mode 100644 index 000000000..b492ccf33 --- /dev/null +++ b/samples/snippets/schema.py @@ -0,0 +1,871 @@ +#!/usr/bin/env python + +# Copyright 2023 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic schema operations +using the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs/schemas. 
+""" + +import argparse +from typing import Optional + +from google.cloud import pubsub_v1 + + +def create_avro_schema(project_id: str, schema_id: str, avsc_file: str) -> None: + """Create a schema resource from a JSON-formatted Avro schema file.""" + # [START pubsub_create_avro_schema] + from google.api_core.exceptions import AlreadyExists + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + + project_path = f"projects/{project_id}" + + # Read a JSON-formatted Avro schema file as a string. + with open(avsc_file, "rb") as f: + avsc_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema(name=schema_path, type_=Schema.Type.AVRO, definition=avsc_source) + + try: + result = schema_client.create_schema( + request={"parent": project_path, "schema": schema, "schema_id": schema_id} + ) + print(f"Created a schema using an Avro schema file:\n{result}") + return result + except AlreadyExists: + print(f"{schema_id} already exists.") + # [END pubsub_create_avro_schema] + + +def create_proto_schema(project_id: str, schema_id: str, proto_file: str) -> None: + """Create a schema resource from a protobuf schema file.""" + # [START pubsub_create_proto_schema] + from google.api_core.exceptions import AlreadyExists + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # proto_file = "path/to/a/proto/file/(.proto)/formatted/in/protocol/buffers" + + project_path = f"projects/{project_id}" + + # Read a protobuf schema file as a string. + with open(proto_file, "rb") as f: + proto_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema( + name=schema_path, type_=Schema.Type.PROTOCOL_BUFFER, definition=proto_source + ) + + try: + result = schema_client.create_schema( + request={"parent": project_path, "schema": schema, "schema_id": schema_id} + ) + print(f"Created a schema using a protobuf schema file:\n{result}") + return result + except AlreadyExists: + print(f"{schema_id} already exists.") + # [END pubsub_create_proto_schema] + + +def commit_avro_schema(project_id: str, schema_id: str, avsc_file: str) -> None: + """Commit a schema resource from a JSON-formatted Avro schema file.""" + # [START pubsub_commit_avro_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + + # Read a JSON-formatted Avro schema file as a string. 
+ with open(avsc_file, "rb") as f: + avsc_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema(name=schema_path, type_=Schema.Type.AVRO, definition=avsc_source) + + try: + result = schema_client.commit_schema( + request={"schema": schema, "name": schema_path} + ) + print(f"Committed a schema revision using an Avro schema file:\n{result}") + return result + except NotFound: + print(f"{schema_id} does not exist.") + # [END pubsub_commit_avro_schema] + + +def commit_proto_schema(project_id: str, schema_id: str, proto_file: str) -> None: + """Commit a schema revision from a protobuf schema file.""" + # [START pubsub_commit_proto_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # proto_file = "path/to/a/proto/file/(.proto)/formatted/in/protocol/buffers" + + # Read a protobuf schema file as a string. + with open(proto_file, "rb") as f: + proto_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema( + name=schema_path, type_=Schema.Type.PROTOCOL_BUFFER, definition=proto_source + ) + + try: + result = schema_client.commit_schema( + request={"schema": schema, "name": schema_path} + ) + print(f"Committed a schema revision using a protobuf schema file:\n{result}") + return result + except NotFound: + print(f"{schema_id} does not exist.") + # [END pubsub_commit_proto_schema] + + +def get_schema(project_id: str, schema_id: str) -> None: + """Get a schema resource.""" + # [START pubsub_get_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + try: + result = schema_client.get_schema(request={"name": schema_path}) + print(f"Got a schema:\n{result}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_get_schema] + + +def get_schema_revision( + project_id: str, schema_id: str, schema_revision_id: str +) -> None: + """Get a schema revision.""" + # [START pubsub_get_schema_revision] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # schema_revision_id = "your-schema-revision-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path( + project_id, schema_id + "@" + schema_revision_id + ) + + try: + result = schema_client.get_schema(request={"name": schema_path}) + print(f"Got a schema revision:\n{result}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_get_schema_revision] + + +def list_schemas(project_id: str) -> None: + """List schema resources.""" + # [START pubsub_list_schemas] + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. 
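
Editor's note: as `get_schema_revision` above shows, a single revision is addressed by suffixing `@<revision_id>` to the schema ID. A hypothetical helper (not part of the sample) capturing that convention:

    from google.cloud.pubsub import SchemaServiceClient

    def schema_revision_path(project_id: str, schema_id: str, revision_id: str) -> str:
        # Yields projects/<project>/schemas/<schema_id>@<revision_id>.
        client = SchemaServiceClient()
        return client.schema_path(project_id, f"{schema_id}@{revision_id}")
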
+ # project_id = "your-project-id" + + project_path = f"projects/{project_id}" + schema_client = SchemaServiceClient() + + for schema in schema_client.list_schemas(request={"parent": project_path}): + print(schema) + + print("Listed schemas.") + # [END pubsub_list_schemas] + + +def list_schema_revisions(project_id: str, schema_id: str) -> None: + """List schema revisions for a schema resource.""" + # [START pubsub_list_schema_revisions] + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + for schema in schema_client.list_schema_revisions(request={"name": schema_path}): + print(schema) + + print("Listed schema revisions.") + # [END pubsub_list_schema_revisions] + + +def rollback_schema_revision( + project_id: str, schema_id: str, schema_revision_id: str +) -> None: + """Roll back a schema revision.""" + # [START pubsub_rollback_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # schema_revision_id = "your-schema-revision-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + try: + result = schema_client.rollback_schema( + request={"name": schema_path, "revision_id": schema_revision_id} + ) + print(f"Rolled back a schema revision:\n{result}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_rollback_schema] + + +def delete_schema(project_id: str, schema_id: str) -> None: + """Delete a schema resource.""" + # [START pubsub_delete_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + try: + schema_client.delete_schema(request={"name": schema_path}) + print(f"Deleted a schema:\n{schema_path}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_delete_schema] + + +def delete_schema_revision(project_id: str, schema_id: str, revision_id: str) -> None: + """Delete a schema revision.""" + # [START pubsub_delete_schema_revision] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. 
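
Editor's note: `rollback_schema` in the snippet above does not discard newer revisions; it commits a fresh revision whose definition copies the target. A sketch of that lifecycle, reusing the `schema_client` and `schema_path` names from the snippets above (`schema_v1` and `schema_v2` are assumed Schema objects with different definitions):

    r1 = schema_client.commit_schema(request={"schema": schema_v1, "name": schema_path})
    r2 = schema_client.commit_schema(request={"schema": schema_v2, "name": schema_path})
    # Rolling back to r1 appends a third revision carrying r1's definition.
    r3 = schema_client.rollback_schema(
        request={"name": schema_path, "revision_id": r1.revision_id}
    )
    assert r3.definition == r1.definition and r3.revision_id != r1.revision_id
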
+ # project_id = "your-project-id" + # schema_id = "your-schema-id" + # revision_id = "your-revision-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id + "@" + revision_id) + + try: + schema_client.delete_schema_revision(request={"name": schema_path}) + print(f"Deleted a schema revision:\n{schema_path}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_delete_schema_revision] + + +def create_topic_with_schema( + project_id: str, topic_id: str, schema_id: str, message_encoding: str +) -> None: + """Create a topic resource with a schema.""" + # [START pubsub_create_topic_with_schema] + from google.api_core.exceptions import AlreadyExists, InvalidArgument + from google.cloud.pubsub import PublisherClient, SchemaServiceClient + from google.pubsub_v1.types import Encoding + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # schema_id = "your-schema-id" + # Choose either BINARY or JSON as valid message encoding in this topic. + # message_encoding = "BINARY" + + publisher_client = PublisherClient() + topic_path = publisher_client.topic_path(project_id, topic_id) + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + if message_encoding == "BINARY": + encoding = Encoding.BINARY + elif message_encoding == "JSON": + encoding = Encoding.JSON + else: + encoding = Encoding.ENCODING_UNSPECIFIED + + try: + response = publisher_client.create_topic( + request={ + "name": topic_path, + "schema_settings": {"schema": schema_path, "encoding": encoding}, + } + ) + print(f"Created a topic:\n{response}") + + except AlreadyExists: + print(f"{topic_id} already exists.") + except InvalidArgument: + print("Schema settings are not valid.") + # [END pubsub_create_topic_with_schema] + + +def update_topic_schema( + project_id: str, topic_id: str, first_revision_id: str, last_revision_id: str +) -> None: + """Update a topic resource's first schema revision.""" + # [START pubsub_update_topic_schema] + from google.api_core.exceptions import InvalidArgument, NotFound + from google.cloud.pubsub import PublisherClient + + # TODO(developer): Replace these variables before running the sample. 
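
Editor's note: the update below passes `update_mask` as a comma-separated camelCase string; the same request field also accepts a `google.protobuf.FieldMask`, which avoids hand-formatting the paths. A sketch of that alternative:

    from google.protobuf import field_mask_pb2

    update_mask = field_mask_pb2.FieldMask(
        paths=[
            "schema_settings.first_revision_id",
            "schema_settings.last_revision_id",
        ]
    )
    # Then pass it in the request dict in place of the string:
    # request={"topic": {...}, "update_mask": update_mask}
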
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # first_revision_id = "your-revision-id"
+    # last_revision_id = "your-revision-id"
+
+    publisher_client = PublisherClient()
+    topic_path = publisher_client.topic_path(project_id, topic_id)
+
+    try:
+        response = publisher_client.update_topic(
+            request={
+                "topic": {
+                    "name": topic_path,
+                    "schema_settings": {
+                        "first_revision_id": first_revision_id,
+                        "last_revision_id": last_revision_id,
+                    },
+                },
+                "update_mask": "schemaSettings.firstRevisionId,schemaSettings.lastRevisionId",
+            }
+        )
+        print(f"Updated a topic schema:\n{response}")
+
+    except NotFound:
+        print(f"{topic_id} not found.")
+    except InvalidArgument:
+        print("Schema settings are not valid.")
+    # [END pubsub_update_topic_schema]
+
+
+def create_topic_with_schema_revisions(
+    project_id: str,
+    topic_id: str,
+    schema_id: str,
+    first_revision_id: str,
+    last_revision_id: str,
+    message_encoding: str,
+) -> None:
+    """Create a topic resource with a schema and a range of schema revisions."""
+    # [START pubsub_create_topic_with_schema_revisions]
+    from google.api_core.exceptions import AlreadyExists, InvalidArgument
+    from google.cloud.pubsub import PublisherClient, SchemaServiceClient
+    from google.pubsub_v1.types import Encoding
+
+    # TODO(developer): Replace these variables before running the sample.
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # schema_id = "your-schema-id"
+    # first_revision_id = "your-revision-id"
+    # last_revision_id = "your-revision-id"
+    # Choose either BINARY or JSON as valid message encoding in this topic.
+    # message_encoding = "BINARY"
+
+    publisher_client = PublisherClient()
+    topic_path = publisher_client.topic_path(project_id, topic_id)
+
+    schema_client = SchemaServiceClient()
+    schema_path = schema_client.schema_path(project_id, schema_id)
+
+    if message_encoding == "BINARY":
+        encoding = Encoding.BINARY
+    elif message_encoding == "JSON":
+        encoding = Encoding.JSON
+    else:
+        encoding = Encoding.ENCODING_UNSPECIFIED
+
+    try:
+        response = publisher_client.create_topic(
+            request={
+                "name": topic_path,
+                "schema_settings": {
+                    "schema": schema_path,
+                    "encoding": encoding,
+                    "first_revision_id": first_revision_id,
+                    "last_revision_id": last_revision_id,
+                },
+            }
+        )
+        print(f"Created a topic:\n{response}")
+
+    except AlreadyExists:
+        print(f"{topic_id} already exists.")
+    except InvalidArgument:
+        print("Please choose either BINARY or JSON as a valid message encoding type.")
+    # [END pubsub_create_topic_with_schema_revisions]
+
+
+def publish_avro_records(project_id: str, topic_id: str, avsc_file: str) -> None:
+    """Publish a BINARY or JSON encoded message to a topic configured with an Avro schema."""
+    # [START pubsub_publish_avro_records]
+    from avro.io import BinaryEncoder, DatumWriter
+    import avro.schema as schema
+    import io
+    import json
+    from google.api_core.exceptions import NotFound
+    from google.cloud.pubsub import PublisherClient
+    from google.pubsub_v1.types import Encoding
+
+    # TODO(developer): Replace these variables before running the sample.
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json"
+
+    publisher_client = PublisherClient()
+    topic_path = publisher_client.topic_path(project_id, topic_id)
+
+    # Prepare to write Avro records to the binary output stream.
+ with open(avsc_file, "rb") as file: + avro_schema = schema.parse(file.read()) + writer = DatumWriter(avro_schema) + bout = io.BytesIO() + + # Prepare some data using a Python dictionary that matches the Avro schema + record = {"name": "Alaska", "post_abbr": "AK"} + + try: + # Get the topic encoding type. + topic = publisher_client.get_topic(request={"topic": topic_path}) + encoding = topic.schema_settings.encoding + + # Encode the data according to the message serialization type. + if encoding == Encoding.BINARY: + encoder = BinaryEncoder(bout) + writer.write(record, encoder) + data = bout.getvalue() + print(f"Preparing a binary-encoded message:\n{data.decode()}") + elif encoding == Encoding.JSON: + data_str = json.dumps(record) + print(f"Preparing a JSON-encoded message:\n{data_str}") + data = data_str.encode("utf-8") + else: + print(f"No encoding specified in {topic_path}. Abort.") + exit(0) + + future = publisher_client.publish(topic_path, data) + print(f"Published message ID: {future.result()}") + + except NotFound: + print(f"{topic_id} not found.") + # [END pubsub_publish_avro_records] + + +def publish_proto_messages(project_id: str, topic_id: str) -> None: + """Publish a BINARY or JSON encoded message to a topic configured with a protobuf schema.""" + # [START pubsub_publish_proto_messages] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import PublisherClient + from google.protobuf.json_format import MessageToJson + from google.pubsub_v1.types import Encoding + + from utilities import us_states_pb2 # type: ignore + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher_client = PublisherClient() + topic_path = publisher_client.topic_path(project_id, topic_id) + + try: + # Get the topic encoding type. + topic = publisher_client.get_topic(request={"topic": topic_path}) + encoding = topic.schema_settings.encoding + + # Instantiate a protoc-generated class defined in `us-states.proto`. + state = us_states_pb2.StateProto() + state.name = "Alaska" + state.post_abbr = "AK" + + # Encode the data according to the message serialization type. + if encoding == Encoding.BINARY: + data = state.SerializeToString() + print(f"Preparing a binary-encoded message:\n{data}") + elif encoding == Encoding.JSON: + json_object = MessageToJson(state) + data = str(json_object).encode("utf-8") + print(f"Preparing a JSON-encoded message:\n{data}") + else: + print(f"No encoding specified in {topic_path}. 
Abort.") + exit(0) + + future = publisher_client.publish(topic_path, data) + print(f"Published message ID: {future.result()}") + + except NotFound: + print(f"{topic_id} not found.") + # [END pubsub_publish_proto_messages] + + +def subscribe_with_avro_schema( + project_id: str, + subscription_id: str, + avsc_file: str, + timeout: Optional[float] = None, +) -> None: + """Receive and decode messages sent to a topic with an Avro schema.""" + # [START pubsub_subscribe_avro_records] + import avro.schema as schema + from avro.io import BinaryDecoder, DatumReader + from concurrent.futures import TimeoutError + import io + import json + from google.cloud.pubsub import SubscriberClient + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + # Number of seconds the subscriber listens for messages + # timeout = 5.0 + + subscriber = SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + with open(avsc_file, "rb") as file: + avro_schema = schema.parse(file.read()) + + def callback(message: pubsub_v1.subscriber.message.Message) -> None: + # Get the message serialization type. + encoding = message.attributes.get("googclient_schemaencoding") + # Deserialize the message data accordingly. + if encoding == "BINARY": + bout = io.BytesIO(message.data) + decoder = BinaryDecoder(bout) + reader = DatumReader(avro_schema) + message_data = reader.read(decoder) + print(f"Received a binary-encoded message:\n{message_data}") + elif encoding == "JSON": + message_data = json.loads(message.data) + print(f"Received a JSON-encoded message:\n{message_data}") + else: + print(f"Received a message with no encoding:\n{message}") + + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print(f"Listening for messages on {subscription_path}..\n") + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception occurs first. + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + # [END pubsub_subscribe_avro_records] + + +def subscribe_with_avro_schema_with_revisions( + project_id: str, + subscription_id: str, + avsc_file: str, + timeout: Optional[float] = None, +) -> None: + """Receive and decode messages sent to a topic with an Avro schema.""" + # [START pubsub_subscribe_avro_records_with_revisions] + import avro.schema as schema + from avro.io import BinaryDecoder, DatumReader + from concurrent.futures import TimeoutError + import io + import json + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient, SubscriberClient + + schema_client = SchemaServiceClient() + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + # Number of seconds the subscriber listens for messages + # timeout = 5.0 + + subscriber = SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + with open(avsc_file, "rb") as file: + reader_avro_schema = schema.parse(file.read()) + # Dict to keep readers for different schema revisions. 
+    revisions_to_readers = {}
+
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        # Get the message serialization type.
+        schema_name = message.attributes.get("googclient_schemaname")
+        schema_revision_id = message.attributes.get("googclient_schemarevisionid")
+        encoding = message.attributes.get("googclient_schemaencoding")
+
+        if schema_revision_id not in revisions_to_readers:
+            schema_path = schema_name + "@" + schema_revision_id
+            try:
+                received_avro_schema = schema_client.get_schema(
+                    request={"name": schema_path}
+                )
+            except NotFound:
+                print(f"{schema_path} not found.")
+                message.nack()
+                return
+            writer_avro_schema = schema.parse(received_avro_schema.definition)
+            revisions_to_readers[schema_revision_id] = DatumReader(
+                writer_avro_schema, reader_avro_schema
+            )
+        reader = revisions_to_readers[schema_revision_id]
+
+        # Deserialize the message data accordingly.
+        if encoding == "BINARY":
+            bout = io.BytesIO(message.data)
+            decoder = BinaryDecoder(bout)
+            message_data = reader.read(decoder)
+            print(f"Received a binary-encoded message:\n{message_data}")
+        elif encoding == "JSON":
+            message_data = json.loads(message.data)
+            print(f"Received a JSON-encoded message:\n{message_data}")
+        else:
+            print(f"Received a message with no encoding:\n{message}")
+            message.nack()
+            return
+
+        message.ack()
+
+    streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
+    print(f"Listening for messages on {subscription_path}..\n")
+
+    # Wrap subscriber in a 'with' block to automatically call close() when done.
+    with subscriber:
+        try:
+            # When `timeout` is not set, result() will block indefinitely,
+            # unless an exception occurs first.
+            streaming_pull_future.result(timeout=timeout)
+        except TimeoutError:
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
+    # [END pubsub_subscribe_avro_records_with_revisions]
+
+
+def subscribe_with_proto_schema(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
+    """Receive and decode messages sent to a topic with a protobuf schema."""
+    # [START pubsub_subscribe_proto_messages]
+    from concurrent.futures import TimeoutError
+    from google.cloud.pubsub import SubscriberClient
+    from google.protobuf.json_format import Parse
+
+    from utilities import us_states_pb2
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # subscription_id = "your-subscription-id"
+    # Number of seconds the subscriber listens for messages
+    # timeout = 5.0
+
+    subscriber = SubscriberClient()
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    # Instantiate a protoc-generated class defined in `us-states.proto`.
+    state = us_states_pb2.StateProto()
+
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        # Get the message serialization type.
+        encoding = message.attributes.get("googclient_schemaencoding")
+        # Deserialize the message data accordingly.
+        if encoding == "BINARY":
+            state.ParseFromString(message.data)
+            print(f"Received a binary-encoded message:\n{state}")
+        elif encoding == "JSON":
+            Parse(message.data, state)
+            print(f"Received a JSON-encoded message:\n{state}")
+        else:
+            print(f"Received a message with no encoding:\n{message}")
+
+        message.ack()
+
+    streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
+    print(f"Listening for messages on {subscription_path}..\n")
+
+    # Wrap subscriber in a 'with' block to automatically call close() when done.
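
Editor's note: every subscribe snippet in this file repeats the same result/cancel/result shutdown sequence. A hypothetical helper (not part of the sample) that captures the pattern; the caller still owns the subscriber's lifecycle, e.g. a `with subscriber:` block:

    from concurrent.futures import TimeoutError
    from typing import Callable, Optional

    from google.cloud.pubsub import SubscriberClient

    def pull_for(
        subscriber: SubscriberClient,
        subscription_path: str,
        callback: Callable,
        timeout: Optional[float] = None,
    ) -> None:
        streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
        try:
            # Blocks until the timeout elapses or the stream fails.
            streaming_pull_future.result(timeout=timeout)
        except TimeoutError:
            streaming_pull_future.cancel()  # Trigger the shutdown.
            streaming_pull_future.result()  # Block until the shutdown is complete.
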
+ with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception occurs first. + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + # [END pubsub_subscribe_proto_messages] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("project_id", help="Your Google Cloud project ID") + + subparsers = parser.add_subparsers(dest="command") + + create_avro_schema_parser = subparsers.add_parser( + "create-avro", help=create_avro_schema.__doc__ + ) + create_avro_schema_parser.add_argument("schema_id") + create_avro_schema_parser.add_argument("avsc_file") + + create_proto_schema_parser = subparsers.add_parser( + "create-proto", help=create_proto_schema.__doc__ + ) + create_proto_schema_parser.add_argument("schema_id") + create_proto_schema_parser.add_argument("proto_file") + + get_schema_parser = subparsers.add_parser("get", help=get_schema.__doc__) + get_schema_parser.add_argument("schema_id") + + list_schemas_parser = subparsers.add_parser("list", help=list_schemas.__doc__) + + delete_schema_parser = subparsers.add_parser("delete", help=delete_schema.__doc__) + delete_schema_parser.add_argument("schema_id") + + create_topic_with_schema_parser = subparsers.add_parser( + "create-topic", help=create_topic_with_schema.__doc__ + ) + create_topic_with_schema_parser.add_argument("topic_id") + create_topic_with_schema_parser.add_argument("schema_id") + create_topic_with_schema_parser.add_argument( + "message_encoding", choices=["BINARY", "JSON"] + ) + + publish_avro_records_parser = subparsers.add_parser( + "publish-avro", help=publish_avro_records.__doc__ + ) + publish_avro_records_parser.add_argument("topic_id") + publish_avro_records_parser.add_argument("avsc_file") + + publish_proto_messages_parser = subparsers.add_parser( + "publish-proto", help=publish_proto_messages.__doc__ + ) + publish_proto_messages_parser.add_argument("topic_id") + + subscribe_with_avro_schema_parser = subparsers.add_parser( + "receive-avro", help=subscribe_with_avro_schema.__doc__ + ) + subscribe_with_avro_schema_parser.add_argument("subscription_id") + subscribe_with_avro_schema_parser.add_argument("avsc_file") + subscribe_with_avro_schema_parser.add_argument( + "timeout", default=None, type=float, nargs="?" + ) + + subscribe_with_proto_schema_parser = subparsers.add_parser( + "receive-proto", help=subscribe_with_proto_schema.__doc__ + ) + subscribe_with_proto_schema_parser.add_argument("subscription_id") + subscribe_with_proto_schema_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+    )
+
+    commit_avro_schema_parser = subparsers.add_parser(
+        "commit-avro", help=commit_avro_schema.__doc__
+    )
+    commit_avro_schema_parser.add_argument("schema_id")
+    commit_avro_schema_parser.add_argument("avsc_file")
+
+    commit_proto_schema_parser = subparsers.add_parser(
+        "commit-proto", help=commit_proto_schema.__doc__
+    )
+    commit_proto_schema_parser.add_argument("schema_id")
+    commit_proto_schema_parser.add_argument("proto_file")
+
+    get_schema_revision_parser = subparsers.add_parser(
+        "get-revision", help=get_schema_revision.__doc__
+    )
+    get_schema_revision_parser.add_argument("schema_id")
+    get_schema_revision_parser.add_argument("revision_id")
+
+    list_schema_revisions_parser = subparsers.add_parser(
+        "list-revisions", help=list_schema_revisions.__doc__
+    )
+    list_schema_revisions_parser.add_argument("schema_id")
+
+    delete_schema_revision_parser = subparsers.add_parser(
+        "delete-revision", help=delete_schema_revision.__doc__
+    )
+    delete_schema_revision_parser.add_argument("schema_id")
+    delete_schema_revision_parser.add_argument("revision_id")
+
+    create_topic_with_schema_revisions_parser = subparsers.add_parser(
+        "create-topic-with-revisions", help=create_topic_with_schema_revisions.__doc__
+    )
+    create_topic_with_schema_revisions_parser.add_argument("topic_id")
+    create_topic_with_schema_revisions_parser.add_argument("schema_id")
+    create_topic_with_schema_revisions_parser.add_argument("first_revision_id")
+    create_topic_with_schema_revisions_parser.add_argument("last_revision_id")
+    create_topic_with_schema_revisions_parser.add_argument(
+        "message_encoding", choices=["BINARY", "JSON"]
+    )
+
+    subscribe_with_avro_schema_with_revisions_parser = subparsers.add_parser(
+        "receive-avro-with-revisions",
+        help=subscribe_with_avro_schema_with_revisions.__doc__,
+    )
+    subscribe_with_avro_schema_with_revisions_parser.add_argument("subscription_id")
+    subscribe_with_avro_schema_with_revisions_parser.add_argument("avsc_file")
+    subscribe_with_avro_schema_with_revisions_parser.add_argument(
+        "timeout", default=None, type=float, nargs="?"
+    )
+
+    args = parser.parse_args()
+
+    if args.command == "create-avro":
+        create_avro_schema(args.project_id, args.schema_id, args.avsc_file)
+    if args.command == "create-proto":
+        create_proto_schema(args.project_id, args.schema_id, args.proto_file)
+    if args.command == "commit-avro":
+        commit_avro_schema(args.project_id, args.schema_id, args.avsc_file)
+    if args.command == "commit-proto":
+        commit_proto_schema(args.project_id, args.schema_id, args.proto_file)
+    if args.command == "get":
+        get_schema(args.project_id, args.schema_id)
+    if args.command == "get-revision":
+        get_schema_revision(args.project_id, args.schema_id, args.revision_id)
+    if args.command == "list":
+        list_schemas(args.project_id)
+    if args.command == "list-revisions":
+        list_schema_revisions(args.project_id, args.schema_id)
+    if args.command == "delete":
+        delete_schema(args.project_id, args.schema_id)
+    if args.command == "delete-revision":
+        delete_schema_revision(args.project_id, args.schema_id, args.revision_id)
+    if args.command == "create-topic":
+        create_topic_with_schema(
+            args.project_id, args.topic_id, args.schema_id, args.message_encoding
+        )
+    if args.command == "create-topic-with-revisions":
+        create_topic_with_schema_revisions(
+            args.project_id,
+            args.topic_id,
+            args.schema_id,
+            args.first_revision_id,
+            args.last_revision_id,
+            args.message_encoding,
+        )
+    if args.command == "publish-avro":
+        publish_avro_records(args.project_id, args.topic_id, args.avsc_file)
+    if args.command == "publish-proto":
+        publish_proto_messages(args.project_id, args.topic_id)
+    if args.command == "receive-avro":
+        subscribe_with_avro_schema(
+            args.project_id, args.subscription_id, args.avsc_file, args.timeout
+        )
+    if args.command == "receive-avro-with-revisions":
+        subscribe_with_avro_schema_with_revisions(
+            args.project_id, args.subscription_id, args.avsc_file, args.timeout
+        )
+    if args.command == "receive-proto":
+        subscribe_with_proto_schema(args.project_id, args.subscription_id, args.timeout)
diff --git a/samples/snippets/schema_test.py b/samples/snippets/schema_test.py
new file mode 100644
index 000000000..ccf65034f
--- /dev/null
+++ b/samples/snippets/schema_test.py
@@ -0,0 +1,563 @@
+#!/usr/bin/env python
+
+# Copyright 2021 Google LLC. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from typing import Any, Callable, Generator, TypeVar, cast
+import uuid
+
+from _pytest.capture import CaptureFixture
+from flaky import flaky
+from google.api_core.exceptions import InternalServerError, NotFound
+from google.cloud import pubsub_v1
+from google.cloud.pubsub import PublisherClient, SchemaServiceClient, SubscriberClient
+from google.pubsub_v1.types import Encoding, Schema, Topic
+import pytest
+
+import schema
+
+# This uuid is shared across tests which run in parallel.
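
Editor's note: example invocations of the command-line interface defined above (project, schema, topic, and subscription names are placeholders):

    python schema.py your-project-id create-avro your-schema-id resources/us-states.avsc
    python schema.py your-project-id create-topic your-topic-id your-schema-id BINARY
    python schema.py your-project-id publish-avro your-topic-id resources/us-states.avsc
    python schema.py your-project-id receive-avro your-subscription-id resources/us-states.avsc 5
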
+UUID = uuid.uuid4().hex +try: + PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +except KeyError: + raise KeyError("Need to set GOOGLE_CLOUD_PROJECT as an environment variable.") +AVRO_TOPIC_ID = f"schema-test-avro-topic-{UUID}" +AVRO_TOPIC_ID_TO_CREATE = f"schema-test-avro-topic-to-create-{UUID}" +PROTO_TOPIC_ID = f"schema-test-proto-topic-{UUID}" +PROTO_WITH_REVISIONS_TOPIC_ID = f"schema-test-proto-with-revisions-topic-{UUID}" +PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE = ( + f"schema-test-proto-with-revisions-topic-to-create-{UUID}" +) +AVRO_SUBSCRIPTION_ID = f"schema-test-avro-subscription-{UUID}" +PROTO_SUBSCRIPTION_ID = f"schema-test-proto-subscription-{UUID}" +AVRO_SCHEMA_ID = f"schema-test-avro-schema-{UUID}" +AVRO_SCHEMA_ID_TO_CREATE = f"schema-test-avro-schema-to-create-{UUID}" +PROTO_SCHEMA_ID = f"schema-test-proto-schema-{UUID}" +PROTO_SCHEMA_ID_TO_CREATE = f"schema-test-proto-schema-to-create-{UUID}" +PROTO_SCHEMA_ID_TO_DELETE = f"schema-test-proto-schema-to-delete-{UUID}" +AVSC_FILE = "resources/us-states.avsc" +AVSC_REVISION_FILE = "resources/us-states.avsc" +PROTO_FILE = "resources/us-states.proto" +PROTO_REVISION_FILE = "resources/us-states.proto" + +# These tests run in parallel in continuous integration, +# with the same UUID. +# Avoid modifying resources that are shared across tests, +# as this results in test flake. + + +@pytest.fixture(scope="module") +def schema_client() -> Generator[pubsub_v1.SchemaServiceClient, None, None]: + schema_client = SchemaServiceClient() + yield schema_client + + +def ensure_schema_exists( + name: str, type: Schema.Type, schema_client: pubsub_v1.SchemaServiceClient +) -> Schema: + schema_path = schema_client.schema_path(PROJECT_ID, name) + + try: + return schema_client.get_schema(request={"name": schema_path}) + except NotFound: + project_path = f"projects/{PROJECT_ID}" + with open(AVSC_FILE if type == Schema.Type.AVRO else PROTO_FILE, "rb") as f: + definition_text = f.read().decode("utf-8") + schema = Schema(name=schema_path, type_=type, definition=definition_text) + return schema_client.create_schema( + request={"parent": project_path, "schema": schema, "schema_id": name} + ) + + +@pytest.fixture(scope="module") +def avro_schema( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + avro_schema = ensure_schema_exists(AVRO_SCHEMA_ID, Schema.Type.AVRO, schema_client) + + yield avro_schema.name + + try: + schema_client.delete_schema(request={"name": avro_schema.name}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def avro_schema_to_create( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + avro_schema_path = schema_client.schema_path(PROJECT_ID, AVRO_SCHEMA_ID_TO_CREATE) + + yield avro_schema_path + + try: + schema_client.delete_schema(request={"name": avro_schema_path}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def proto_schema( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + proto_schema = ensure_schema_exists( + PROTO_SCHEMA_ID, Schema.Type.PROTOCOL_BUFFER, schema_client + ) + + yield proto_schema.name + + try: + schema_client.delete_schema(request={"name": proto_schema.name}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def proto_schema_to_delete( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + proto_schema = ensure_schema_exists( + PROTO_SCHEMA_ID_TO_DELETE, 
Schema.Type.PROTOCOL_BUFFER, schema_client + ) + + yield proto_schema.name + + try: + schema_client.delete_schema(request={"name": proto_schema.name}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def proto_schema_to_create( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + proto_schema_path = schema_client.schema_path(PROJECT_ID, PROTO_SCHEMA_ID_TO_CREATE) + + yield proto_schema_path + + try: + schema_client.delete_schema(request={"name": proto_schema_path}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: + yield PublisherClient() + + +def ensure_topic_exists( + name: str, + schema_path: str, + encoding: Encoding, + publisher_client: pubsub_v1.PublisherClient, +) -> Topic: + topic_path = publisher_client.topic_path(PROJECT_ID, name) + + try: + return publisher_client.get_topic(request={"topic": topic_path}) + except NotFound: + return publisher_client.create_topic( + request={ + "name": topic_path, + "schema_settings": { + "schema": schema_path, + "encoding": encoding, + }, + } + ) + + +@pytest.fixture(scope="module") +def avro_topic( + publisher_client: pubsub_v1.PublisherClient, avro_schema: str +) -> Generator[str, None, None]: + avro_topic = ensure_topic_exists( + AVRO_TOPIC_ID, avro_schema, Encoding.BINARY, publisher_client + ) + + yield avro_topic.name + try: + publisher_client.delete_topic(request={"topic": avro_topic.name}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def avro_topic_to_create( + publisher_client: pubsub_v1.PublisherClient, avro_schema: str +) -> Generator[str, None, None]: + avro_topic_path = publisher_client.topic_path(PROJECT_ID, AVRO_TOPIC_ID_TO_CREATE) + + yield avro_topic_path + try: + publisher_client.delete_topic(request={"topic": avro_topic_path}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def proto_topic( + publisher_client: pubsub_v1.PublisherClient, proto_schema: str +) -> Generator[str, None, None]: + proto_topic = ensure_topic_exists( + PROTO_TOPIC_ID, proto_schema, Encoding.BINARY, publisher_client + ) + + yield proto_topic.name + try: + publisher_client.delete_topic(request={"topic": proto_topic.name}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def proto_with_revisions_topic( + publisher_client: pubsub_v1.PublisherClient, proto_schema: str +) -> Generator[str, None, None]: + proto_topic = ensure_topic_exists( + PROTO_WITH_REVISIONS_TOPIC_ID, proto_schema, Encoding.BINARY, publisher_client + ) + + yield proto_topic.name + try: + publisher_client.delete_topic(request={"topic": proto_topic.name}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def proto_with_revisions_topic_to_create( + publisher_client: pubsub_v1.PublisherClient, proto_schema: str +) -> Generator[str, None, None]: + topic_path = publisher_client.topic_path( + PROJECT_ID, PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE + ) + + yield topic_path + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: + subscriber_client = SubscriberClient() + yield subscriber_client + subscriber_client.close() + + +@pytest.fixture(scope="module") +def avro_subscription( + subscriber_client: pubsub_v1.SubscriberClient, avro_topic: str +) -> Generator[str, None, None]: + avro_subscription_path = 
subscriber_client.subscription_path( + PROJECT_ID, AVRO_SUBSCRIPTION_ID + ) + + try: + avro_subscription = subscriber_client.get_subscription( + request={"subscription": avro_subscription_path} + ) + except NotFound: + avro_subscription = subscriber_client.create_subscription( + request={"name": avro_subscription_path, "topic": avro_topic} + ) + + yield avro_subscription.name + + try: + subscriber_client.delete_subscription( + request={"subscription": avro_subscription.name} + ) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def proto_subscription( + subscriber_client: pubsub_v1.SubscriberClient, proto_topic: str +) -> Generator[str, None, None]: + proto_subscription_path = subscriber_client.subscription_path( + PROJECT_ID, PROTO_SUBSCRIPTION_ID + ) + + try: + proto_subscription = subscriber_client.get_subscription( + request={"subscription": proto_subscription_path} + ) + except NotFound: + proto_subscription = subscriber_client.create_subscription( + request={"name": proto_subscription_path, "topic": proto_topic} + ) + + yield proto_subscription.name + + try: + subscriber_client.delete_subscription( + request={"subscription": proto_subscription.name} + ) + except NotFound: + pass + + +def test_create_avro_schema( + schema_client: pubsub_v1.SchemaServiceClient, + avro_schema_to_create: str, + capsys: CaptureFixture[str], +) -> None: + try: + schema_client.delete_schema(request={"name": avro_schema_to_create}) + except NotFound: + pass + + schema.create_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID_TO_CREATE, AVSC_FILE) + + out, _ = capsys.readouterr() + assert "Created a schema using an Avro schema file:" in out + assert f"{avro_schema_to_create}" in out + + +def test_create_proto_schema( + schema_client: pubsub_v1.SchemaServiceClient, + proto_schema_to_create: str, + capsys: CaptureFixture[str], +) -> None: + try: + schema_client.delete_schema(request={"name": proto_schema_to_create}) + except NotFound: + pass + + schema.create_proto_schema(PROJECT_ID, PROTO_SCHEMA_ID_TO_CREATE, PROTO_FILE) + + out, _ = capsys.readouterr() + assert "Created a schema using a protobuf schema file:" in out + assert f"{proto_schema_to_create}" in out + + +def test_commit_avro_schema( + schema_client: pubsub_v1.SchemaServiceClient, + avro_schema: str, + capsys: CaptureFixture[str], +) -> None: + schema.commit_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE) + + out, _ = capsys.readouterr() + assert "Committed a schema revision using an Avro schema file:" in out + # assert f"{avro_schema}" in out + + +def test_commit_proto_schema( + schema_client: pubsub_v1.SchemaServiceClient, + proto_schema: str, + capsys: CaptureFixture[str], +) -> None: + schema.commit_proto_schema(PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE) + + out, _ = capsys.readouterr() + assert "Committed a schema revision using a protobuf schema file:" in out + # assert f"{proto_schema}" in out + + +def test_get_schema(avro_schema: str, capsys: CaptureFixture[str]) -> None: + schema.get_schema(PROJECT_ID, AVRO_SCHEMA_ID) + out, _ = capsys.readouterr() + assert "Got a schema" in out + assert f"{avro_schema}" in out + + +def test_get_schema_revision(avro_schema: str, capsys: CaptureFixture[str]) -> None: + committed_schema = schema.commit_avro_schema( + PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE + ) + schema.get_schema_revision(PROJECT_ID, AVRO_SCHEMA_ID, committed_schema.revision_id) + out, _ = capsys.readouterr() + assert "Got a schema revision" in out + assert f"{avro_schema}" in out + + +def 
test_rollback_schema_revision( + avro_schema: str, capsys: CaptureFixture[str] +) -> None: + committed_schema = schema.commit_avro_schema( + PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE + ) + schema.commit_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE) + schema.rollback_schema_revision( + PROJECT_ID, AVRO_SCHEMA_ID, committed_schema.revision_id + ) + out, _ = capsys.readouterr() + assert "Rolled back a schema revision" in out + # assert f"{avro_schema}" in out + + +def test_delete_schema_revision(avro_schema: str, capsys: CaptureFixture[str]) -> None: + committed_schema = schema.commit_avro_schema( + PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE + ) + schema.commit_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE) + schema.delete_schema_revision( + PROJECT_ID, AVRO_SCHEMA_ID, committed_schema.revision_id + ) + out, _ = capsys.readouterr() + assert "Deleted a schema revision" in out + # assert f"{avro_schema}" in out + + +def test_list_schemas(capsys: CaptureFixture[str]) -> None: + schema.list_schemas(PROJECT_ID) + out, _ = capsys.readouterr() + assert "Listed schemas." in out + + +def test_list_schema_revisions(capsys: CaptureFixture[str]) -> None: + schema.list_schema_revisions(PROJECT_ID, AVRO_SCHEMA_ID) + out, _ = capsys.readouterr() + assert "Listed schema revisions." in out + + +def test_create_topic_with_schema( + avro_schema: str, + avro_topic_to_create: str, + publisher_client: pubsub_v1.PublisherClient, + capsys: CaptureFixture[str], +) -> None: + schema.create_topic_with_schema( + PROJECT_ID, AVRO_TOPIC_ID_TO_CREATE, AVRO_SCHEMA_ID, "BINARY" + ) + out, _ = capsys.readouterr() + assert "Created a topic" in out + assert f"{AVRO_TOPIC_ID_TO_CREATE}" in out + assert f"{avro_schema}" in out + assert "BINARY" in out or "2" in out + + +def test_create_topic_with_schema_revisions( + proto_schema: str, + proto_with_revisions_topic_to_create: str, + publisher_client: pubsub_v1.PublisherClient, + capsys: CaptureFixture[str], +) -> None: + committed_schema = schema.commit_proto_schema( + PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE + ) + + schema.create_topic_with_schema_revisions( + PROJECT_ID, + PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE, + PROTO_SCHEMA_ID, + committed_schema.revision_id, + committed_schema.revision_id, + "BINARY", + ) + out, _ = capsys.readouterr() + assert "Created a topic" in out + assert f"{PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE}" in out + assert f"{proto_schema}" in out + assert "BINARY" in out or "2" in out + + +def test_update_topic_schema( + proto_schema: str, proto_with_revisions_topic: str, capsys: CaptureFixture[str] +) -> None: + committed_schema = schema.commit_proto_schema( + PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE + ) + + schema.update_topic_schema( + PROJECT_ID, + PROTO_WITH_REVISIONS_TOPIC_ID, + committed_schema.revision_id, + committed_schema.revision_id, + ) + out, _ = capsys.readouterr() + assert "Updated a topic schema" in out + assert f"{PROTO_WITH_REVISIONS_TOPIC_ID}" in out + assert f"{proto_schema}" in out + + +def test_publish_avro_records( + avro_schema: str, avro_topic: str, capsys: CaptureFixture[str] +) -> None: + schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) + out, _ = capsys.readouterr() + assert "Preparing a binary-encoded message" in out + assert "Published message ID" in out + + +def test_subscribe_with_avro_schema( + avro_schema: str, + avro_topic: str, + avro_subscription: str, + capsys: CaptureFixture[str], +) -> None: + schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, 
AVSC_FILE) + + schema.subscribe_with_avro_schema(PROJECT_ID, AVRO_SUBSCRIPTION_ID, AVSC_FILE, 9) + out, _ = capsys.readouterr() + assert "Received a binary-encoded message:" in out + + +def test_subscribe_with_avro_schema_revisions( + avro_schema: str, + avro_topic: str, + avro_subscription: str, + capsys: CaptureFixture[str], +) -> None: + schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) + + schema.subscribe_with_avro_schema_with_revisions( + PROJECT_ID, AVRO_SUBSCRIPTION_ID, AVSC_FILE, 9 + ) + out, _ = capsys.readouterr() + assert "Received a binary-encoded message:" in out + + +def test_publish_proto_records(proto_topic: str, capsys: CaptureFixture[str]) -> None: + schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) + out, _ = capsys.readouterr() + assert "Preparing a binary-encoded message" in out + assert "Published message ID" in out + + +def test_subscribe_with_proto_schema( + proto_schema: str, + proto_topic: str, + proto_subscription: str, + capsys: CaptureFixture[str], +) -> None: + schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) + + schema.subscribe_with_proto_schema(PROJECT_ID, PROTO_SUBSCRIPTION_ID, 9) + out, _ = capsys.readouterr() + assert "Received a binary-encoded message" in out + + +C = TypeVar("C", bound=Callable[..., Any]) +typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) + + +@typed_flaky +def test_delete_schema( + proto_schema_to_delete: str, capsys: CaptureFixture[str] +) -> None: + schema.delete_schema(PROJECT_ID, PROTO_SCHEMA_ID_TO_DELETE) + out, _ = capsys.readouterr() + assert "Deleted a schema" in out + assert f"{proto_schema_to_delete}" in out diff --git a/samples/snippets/subscriber.py b/samples/snippets/subscriber.py index f079e7d42..5549d056f 100644 --- a/samples/snippets/subscriber.py +++ b/samples/snippets/subscriber.py @@ -22,9 +22,14 @@ """ import argparse +import typing +from typing import Optional +if typing.TYPE_CHECKING: + from google.pubsub_v1 import types as gapic_types -def list_subscriptions_in_topic(project_id, topic_id): + +def list_subscriptions_in_topic(project_id: str, topic_id: str) -> None: """Lists all subscriptions for a given topic.""" # [START pubsub_list_topic_subscriptions] from google.cloud import pubsub_v1 @@ -36,12 +41,13 @@ def list_subscriptions_in_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - for subscription in publisher.list_topic_subscriptions(topic_path): + response = publisher.list_topic_subscriptions(request={"topic": topic_path}) + for subscription in response: print(subscription) # [END pubsub_list_topic_subscriptions] -def list_subscriptions_in_project(project_id): +def list_subscriptions_in_project(project_id: str) -> None: """Lists all subscriptions in the current project.""" # [START pubsub_list_subscriptions] from google.cloud import pubsub_v1 @@ -50,17 +56,107 @@ def list_subscriptions_in_project(project_id): # project_id = "your-project-id" subscriber = pubsub_v1.SubscriberClient() - project_path = subscriber.project_path(project_id) + project_path = f"projects/{project_id}" # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. 
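
Editor's note: the list calls in this module return auto-paginating iterators, so plain iteration (as in the snippets here) fetches pages lazily. When explicit page boundaries matter, the pager exposes them too; a sketch using the names from this function:

    response = subscriber.list_subscriptions(request={"project": project_path})
    for page in response.pages:  # one RPC per page
        for subscription in page.subscriptions:
            print(subscription.name)
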
     with subscriber:
-        for subscription in subscriber.list_subscriptions(project_path):
+        for subscription in subscriber.list_subscriptions(
+            request={"project": project_path}
+        ):
             print(subscription.name)
     # [END pubsub_list_subscriptions]
 
 
-def create_subscription(project_id, topic_id, subscription_id):
+def pubsub_subscribe_otel_tracing(
+    subscription_project_id: str,
+    cloud_trace_project_id: str,
+    subscription_id: str,
+    timeout: Optional[float] = None,
+) -> None:
+    """
+    Subscribe to `subscription_id` in `subscription_project_id` with
+    OpenTelemetry enabled, and export the OpenTelemetry traces to Google
+    Cloud Trace in project `cloud_trace_project_id`.
+    Args:
+        subscription_project_id: project ID of the subscription.
+        cloud_trace_project_id: project ID to export Cloud Trace to.
+        subscription_id: subscription ID to subscribe from.
+        timeout: number of seconds to listen for messages before returning.
+    Returns:
+        None
+    """
+    # [START pubsub_subscribe_otel_tracing]
+    from concurrent.futures import TimeoutError
+    from opentelemetry import trace
+    from opentelemetry.sdk.trace import TracerProvider
+    from opentelemetry.sdk.trace.export import (
+        BatchSpanProcessor,
+    )
+    from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter
+    from opentelemetry.sdk.trace.sampling import TraceIdRatioBased, ParentBased
+
+    from google.cloud import pubsub_v1
+    from google.cloud.pubsub_v1 import SubscriberClient
+    from google.cloud.pubsub_v1.types import SubscriberOptions
+
+    # TODO(developer)
+    # subscription_project_id = "your-subscription-project-id"
+    # subscription_id = "your-subscription-id"
+    # cloud_trace_project_id = "your-cloud-trace-project-id"
+    # timeout = 300.0
+
+    # In this sample, we use Google Cloud Trace to export the OpenTelemetry
+    # traces: https://cloud.google.com/trace/docs/setup/python-ot
+    # Choose and configure the exporter for your setup accordingly.
+
+    sampler = ParentBased(root=TraceIdRatioBased(1))
+    trace.set_tracer_provider(TracerProvider(sampler=sampler))
+
+    # Export to Google Trace
+    cloud_trace_exporter = CloudTraceSpanExporter(
+        project_id=cloud_trace_project_id,
+    )
+    trace.get_tracer_provider().add_span_processor(
+        BatchSpanProcessor(cloud_trace_exporter)
+    )
+    # Set the `enable_open_telemetry_tracing` option to True when creating
+    # the subscriber client. This in itself is necessary and sufficient for
+    # the library to export OpenTelemetry traces. However, where the traces
+    # must be exported to needs to be configured based on your OpenTelemetry
+    # set up. Refer: https://opentelemetry.io/docs/languages/python/exporters/
+    subscriber = SubscriberClient(
+        subscriber_options=SubscriberOptions(enable_open_telemetry_tracing=True)
+    )
+
+    # The `subscription_path` method creates a fully qualified identifier
+    # in the form `projects/{project_id}/subscriptions/{subscription_id}`
+    subscription_path = subscriber.subscription_path(
+        subscription_project_id, subscription_id
+    )
+
+    # Define callback to be called when a message is received.
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        # Ack message after processing it.
+        print(message.data)
+        message.ack()
+
+    # Wrap subscriber in a 'with' block to automatically call close() when done.
+    with subscriber:
+        try:
+            # Optimistically subscribe to messages on the subscription.
+            streaming_pull_future = subscriber.subscribe(
+                subscription_path, callback=callback
+            )
+            streaming_pull_future.result(timeout=timeout)
+        except TimeoutError:
+            print("Successfully subscribed until the timeout passed.")
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+ streaming_pull_future.result() # Block until the shutdown is complete. + + # [END pubsub_subscribe_otel_tracing] + + +def create_subscription(project_id: str, topic_id: str, subscription_id: str) -> None: """Create a new pull subscription on the given topic.""" # [START pubsub_create_pull_subscription] from google.cloud import pubsub_v1 @@ -70,22 +166,109 @@ def create_subscription(project_id, topic_id, subscription_id): # topic_id = "your-topic-id" # subscription_id = "your-subscription-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. with subscriber: - subscription = subscriber.create_subscription(subscription_path, topic_path) + subscription = subscriber.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) - print("Subscription created: {}".format(subscription)) + print(f"Subscription created: {subscription}") # [END pubsub_create_pull_subscription] +def optimistic_subscribe( + project_id: str, + topic_id: str, + subscription_id: str, + timeout: Optional[float] = None, +) -> None: + """Optimistically subscribe to messages instead of making calls to verify existence + of a subscription first and then subscribing to messages from it. This avoids admin + operation calls to verify the existence of a subscription and reduces the probability + of running out of quota for admin operations.""" + # [START pubsub_optimistic_subscribe] + from google.api_core.exceptions import NotFound + from google.cloud import pubsub_v1 + from concurrent.futures import TimeoutError + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + # topic_id = "your-topic-id" + + # Create a subscriber client. + subscriber = pubsub_v1.SubscriberClient() + + # The `subscription_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + # Define callback to be called when a message is received. + def callback(message: pubsub_v1.subscriber.message.Message) -> None: + # Ack message after processing it. + message.ack() + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # Optimistically subscribe to messages on the subscription. + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback + ) + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + print("Successfully subscribed until the timeout passed.") + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + except NotFound: + print(f"Subscription {subscription_path} not found, creating it.") + + try: + # If the subscription does not exist, then create it. 
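
Editor's note: two optimistic subscribers can race to create the same subscription at this point. Tolerating AlreadyExists keeps the flow idempotent; a sketch using the names from this sample:

    from google.api_core.exceptions import AlreadyExists

    try:
        subscription = subscriber.create_subscription(
            request={"name": subscription_path, "topic": topic_path}
        )
    except AlreadyExists:
        pass  # Another client created it first; subscribing is still safe.
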
+ publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription = subscriber.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + + if subscription: + print(f"Subscription {subscription.name} created") + else: + raise ValueError("Subscription creation failed.") + + # Subscribe on the created subscription. + try: + streaming_pull_future = subscriber.subscribe( + subscription.name, callback=callback + ) + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + except Exception as e: + print( + f"Exception occurred when creating subscription and subscribing to it: {e}" + ) + except Exception as e: + print(f"Exception occurred when attempting optimistic subscribe: {e}") + # [END pubsub_optimistic_subscribe] + + def create_subscription_with_dead_letter_topic( - project_id, topic_id, subscription_id, dead_letter_topic_id -): + project_id: str, + topic_id: str, + subscription_id: str, + dead_letter_topic_id: str, + max_delivery_attempts: int = 5, +) -> None: """Create a subscription with dead letter policy.""" # [START pubsub_dead_letter_create_subscription] from google.cloud import pubsub_v1 @@ -102,36 +285,43 @@ def create_subscription_with_dead_letter_topic( # TODO(developer): This is an existing dead letter topic that the subscription # with dead letter policy will forward dead letter messages to. # dead_letter_topic_id = "your-dead-letter-topic-id" + # TODO(developer): This is the maximum number of delivery attempts allowed + # for a message before it gets delivered to a dead letter topic. + # max_delivery_attempts = 5 + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) + dead_letter_topic_path = publisher.topic_path(project_id, dead_letter_topic_id) dead_letter_policy = DeadLetterPolicy( - dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 + dead_letter_topic=dead_letter_topic_path, + max_delivery_attempts=max_delivery_attempts, ) with subscriber: - subscription = subscriber.create_subscription( - subscription_path, topic_path, dead_letter_policy=dead_letter_policy - ) - - print("Subscription created: {}".format(subscription.name)) + request = { + "name": subscription_path, + "topic": topic_path, + "dead_letter_policy": dead_letter_policy, + } + subscription = subscriber.create_subscription(request) + + print(f"Subscription created: {subscription.name}") print( - "It will forward dead letter messages to: {}".format( - subscription.dead_letter_policy.dead_letter_topic - ) + f"It will forward dead letter messages to: {subscription.dead_letter_policy.dead_letter_topic}." ) print( - "After {} delivery attempts.".format( - subscription.dead_letter_policy.max_delivery_attempts - ) + f"After {subscription.dead_letter_policy.max_delivery_attempts} delivery attempts." 
     )
     # [END pubsub_dead_letter_create_subscription]


-def create_push_subscription(project_id, topic_id, subscription_id, endpoint):
+def create_push_subscription(
+    project_id: str, topic_id: str, subscription_id: str, endpoint: str
+) -> None:
     """Create a new push subscription on the given topic."""
     # [START pubsub_create_push_subscription]
     from google.cloud import pubsub_v1
@@ -142,8 +332,9 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint):
     # subscription_id = "your-subscription-id"
     # endpoint = "https://my-test-project.appspot.com/push"

+    publisher = pubsub_v1.PublisherClient()
     subscriber = pubsub_v1.SubscriberClient()
-    topic_path = subscriber.topic_path(project_id, topic_id)
+    topic_path = publisher.topic_path(project_id, topic_id)
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

     push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint)
@@ -152,15 +343,281 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint):
     # close the underlying gRPC channel when done.
     with subscriber:
         subscription = subscriber.create_subscription(
-            subscription_path, topic_path, push_config
+            request={
+                "name": subscription_path,
+                "topic": topic_path,
+                "push_config": push_config,
+            }
         )

-    print("Push subscription created: {}".format(subscription))
-    print("Endpoint for subscription is: {}".format(endpoint))
+    print(f"Push subscription created: {subscription}.")
+    print(f"Endpoint for subscription is: {endpoint}")
     # [END pubsub_create_push_subscription]


-def delete_subscription(project_id, subscription_id):
+def create_push_no_wrapper_subscription(
+    project_id: str, topic_id: str, subscription_id: str, endpoint: str
+) -> None:
+    """Create a new unwrapped (no-wrapper) push subscription on the given topic."""
+    # [START pubsub_create_unwrapped_push_subscription]
+    from google.cloud import pubsub_v1
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # subscription_id = "your-subscription-id"
+    # endpoint = "https://my-test-project.appspot.com/push"
+
+    publisher = pubsub_v1.PublisherClient()
+    subscriber = pubsub_v1.SubscriberClient()
+    topic_path = publisher.topic_path(project_id, topic_id)
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    no_wrapper = pubsub_v1.types.PushConfig.NoWrapper(write_metadata=True)
+    push_config = pubsub_v1.types.PushConfig(
+        push_endpoint=endpoint, no_wrapper=no_wrapper
+    )
+
+    # Wrap the subscriber in a 'with' block to automatically call close() to
+    # close the underlying gRPC channel when done.
+    with subscriber:
+        subscription = subscriber.create_subscription(
+            request={
+                "name": subscription_path,
+                "topic": topic_path,
+                "push_config": push_config,
+            }
+        )
+
+    print(f"Push no wrapper subscription created: {subscription}.")
+    print(f"Endpoint for subscription is: {endpoint}")
+    print(f"No wrapper configuration for subscription is: {no_wrapper}")
+    # [END pubsub_create_unwrapped_push_subscription]
+
+
+def create_subscription_with_ordering(
+    project_id: str, topic_id: str, subscription_id: str
+) -> None:
+    """Create a subscription with ordering enabled."""
+    # [START pubsub_enable_subscription_ordering]
+    from google.cloud import pubsub_v1
+
+    # TODO(developer): Choose an existing topic.
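+    # Note: for messages to arrive in order, they must also be published with
+    # an ordering key; ordering applies only among messages that share a key.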
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # subscription_id = "your-subscription-id"
+
+    publisher = pubsub_v1.PublisherClient()
+    subscriber = pubsub_v1.SubscriberClient()
+    topic_path = publisher.topic_path(project_id, topic_id)
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    with subscriber:
+        subscription = subscriber.create_subscription(
+            request={
+                "name": subscription_path,
+                "topic": topic_path,
+                "enable_message_ordering": True,
+            }
+        )
+        print(f"Created subscription with ordering: {subscription}")
+    # [END pubsub_enable_subscription_ordering]
+
+
+def create_subscription_with_filtering(
+    project_id: str,
+    topic_id: str,
+    subscription_id: str,
+    filter: str,
+) -> None:
+    """Create a subscription with filtering enabled."""
+    # [START pubsub_create_subscription_with_filter]
+    from google.cloud import pubsub_v1
+
+    # TODO(developer): Choose an existing topic.
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # subscription_id = "your-subscription-id"
+    # filter = "attributes.author=\"unknown\""
+
+    publisher = pubsub_v1.PublisherClient()
+    subscriber = pubsub_v1.SubscriberClient()
+    topic_path = publisher.topic_path(project_id, topic_id)
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    with subscriber:
+        subscription = subscriber.create_subscription(
+            request={"name": subscription_path, "topic": topic_path, "filter": filter}
+        )
+        print(f"Created subscription with filtering enabled: {subscription}")
+    # [END pubsub_create_subscription_with_filter]
+
+
+def create_subscription_with_exactly_once_delivery(
+    project_id: str, topic_id: str, subscription_id: str
+) -> None:
+    """Create a subscription with exactly once delivery enabled."""
+    # [START pubsub_create_subscription_with_exactly_once_delivery]
+    from google.cloud import pubsub_v1
+
+    # TODO(developer): Choose an existing topic.
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # subscription_id = "your-subscription-id"
+
+    publisher = pubsub_v1.PublisherClient()
+    subscriber = pubsub_v1.SubscriberClient()
+    topic_path = publisher.topic_path(project_id, topic_id)
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    with subscriber:
+        subscription = subscriber.create_subscription(
+            request={
+                "name": subscription_path,
+                "topic": topic_path,
+                "enable_exactly_once_delivery": True,
+            }
+        )
+        print(
+            f"Created subscription with exactly once delivery enabled: {subscription}"
+        )
+    # [END pubsub_create_subscription_with_exactly_once_delivery]
+
+
+def create_bigquery_subscription(
+    project_id: str, topic_id: str, subscription_id: str, bigquery_table_id: str
+) -> None:
+    """Create a new BigQuery subscription on the given topic."""
+    # [START pubsub_create_bigquery_subscription]
+    from google.cloud import pubsub_v1
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # subscription_id = "your-subscription-id"
+    # bigquery_table_id = "your-project.your-dataset.your-table"
+
+    publisher = pubsub_v1.PublisherClient()
+    subscriber = pubsub_v1.SubscriberClient()
+    topic_path = publisher.topic_path(project_id, topic_id)
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    bigquery_config = pubsub_v1.types.BigQueryConfig(
+        table=bigquery_table_id, write_metadata=True
+    )
+
+    # Wrap the subscriber in a 'with' block to automatically call close() to
+    # close the underlying gRPC channel when done.
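+    # Note: creating the subscription fails unless the Pub/Sub service agent
+    # is allowed to write to the table (for example, roles/bigquery.dataEditor).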
+    with subscriber:
+        subscription = subscriber.create_subscription(
+            request={
+                "name": subscription_path,
+                "topic": topic_path,
+                "bigquery_config": bigquery_config,
+            }
+        )
+
+    print(f"BigQuery subscription created: {subscription}.")
+    print(f"Table for subscription is: {bigquery_table_id}")
+    # [END pubsub_create_bigquery_subscription]
+
+
+def create_cloudstorage_subscription(
+    project_id: str, topic_id: str, subscription_id: str, bucket: str
+) -> None:
+    """Create a new Cloud Storage subscription on the given topic."""
+    # [START pubsub_create_cloud_storage_subscription]
+    from google.cloud import pubsub_v1
+    from google.protobuf import duration_pb2
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # subscription_id = "your-subscription-id"
+    # bucket = "my-bucket"
+
+    filename_prefix = "log_events_"
+    filename_suffix = ".avro"
+    # Use either CloudStorageConfig.AvroConfig or CloudStorageConfig.TextConfig;
+    # the output format defaults to TextConfig if neither is set.
+    avro_config = pubsub_v1.types.CloudStorageConfig.AvroConfig(write_metadata=True)
+
+    publisher = pubsub_v1.PublisherClient()
+    subscriber = pubsub_v1.SubscriberClient()
+    topic_path = publisher.topic_path(project_id, topic_id)
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+    max_duration = duration_pb2.Duration()
+    max_duration.FromSeconds(300)
+
+    cloudstorage_config = pubsub_v1.types.CloudStorageConfig(
+        bucket=bucket,
+        filename_prefix=filename_prefix,
+        filename_suffix=filename_suffix,
+        avro_config=avro_config,
+        # Min 1 minute, max 10 minutes.
+        max_duration=max_duration,
+        # Min 1 KB, max 10 GiB.
+        max_bytes=10000000,
+    )
+
+    # Wrap the subscriber in a 'with' block to automatically call close() to
+    # close the underlying gRPC channel when done.
+    with subscriber:
+        subscription = subscriber.create_subscription(
+            request={
+                "name": subscription_path,
+                "topic": topic_path,
+                "cloud_storage_config": cloudstorage_config,
+            }
+        )
+
+    print(f"CloudStorage subscription created: {subscription}.")
+    print(f"Bucket for subscription is: {bucket}")
+    print(f"Prefix is: {filename_prefix}")
+    print(f"Suffix is: {filename_suffix}")
+    # [END pubsub_create_cloud_storage_subscription]
+
+
+def create_subscription_with_smt(
+    project_id: str, topic_id: str, subscription_id: str
+) -> None:
+    """Create a subscription with a JavaScript UDF single message transform (SMT)."""
+    # [START pubsub_create_subscription_with_smt]
+    from google.cloud import pubsub_v1
+    from google.pubsub_v1.types import JavaScriptUDF, MessageTransform
+
+    # TODO(developer): Choose an existing topic.
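+    # Note: the UDF below runs on each message before delivery; per the SMT
+    # docs, returning null from the function filters the message out entirely.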
+    # project_id = "your-project-id"
+    # topic_id = "your-topic-id"
+    # subscription_id = "your-subscription-id"
+
+    publisher = pubsub_v1.PublisherClient()
+    subscriber = pubsub_v1.SubscriberClient()
+    topic_path = publisher.topic_path(project_id, topic_id)
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    code = """function redactSSN(message, metadata) {
+      const data = JSON.parse(message.data);
+      delete data['ssn'];
+      message.data = JSON.stringify(data);
+      return message;
+      }"""
+    udf = JavaScriptUDF(code=code, function_name="redactSSN")
+    transforms = [MessageTransform(javascript_udf=udf)]
+
+    with subscriber:
+        subscription = subscriber.create_subscription(
+            request={
+                "name": subscription_path,
+                "topic": topic_path,
+                "message_transforms": transforms,
+            }
+        )
+        print(f"Created subscription with SMT: {subscription}")
+    # [END pubsub_create_subscription_with_smt]
+
+
+def delete_subscription(project_id: str, subscription_id: str) -> None:
     """Deletes an existing Pub/Sub subscription."""
     # [START pubsub_delete_subscription]
     from google.cloud import pubsub_v1
@@ -175,13 +632,15 @@ def delete_subscription(project_id, subscription_id):
     # Wrap the subscriber in a 'with' block to automatically call close() to
     # close the underlying gRPC channel when done.
     with subscriber:
-        subscriber.delete_subscription(subscription_path)
+        subscriber.delete_subscription(request={"subscription": subscription_path})

-    print("Subscription deleted: {}".format(subscription_path))
+    print(f"Subscription deleted: {subscription_path}.")
     # [END pubsub_delete_subscription]


-def update_push_subscription(project_id, topic_id, subscription_id, endpoint):
+def update_push_subscription(
+    project_id: str, topic_id: str, subscription_id: str, endpoint: str
+) -> None:
     """
     Updates an existing Pub/Sub subscription's push endpoint URL.
     Note that certain properties of a subscription, such as
@@ -210,16 +669,22 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint):
     # Wrap the subscriber in a 'with' block to automatically call close() to
     # close the underlying gRPC channel when done.
     with subscriber:
-        result = subscriber.update_subscription(subscription, update_mask)
+        result = subscriber.update_subscription(
+            request={"subscription": subscription, "update_mask": update_mask}
+        )

-        print("Subscription updated: {}".format(subscription_path))
-        print("New endpoint for subscription is: {}".format(result.push_config))
+    print(f"Subscription updated: {subscription_path}")
+    print(f"New endpoint for subscription is: {result.push_config}.")
     # [END pubsub_update_push_configuration]


 def update_subscription_with_dead_letter_policy(
-    project_id, topic_id, subscription_id, dead_letter_topic_id
-):
+    project_id: str,
+    topic_id: str,
+    subscription_id: str,
+    dead_letter_topic_id: str,
+    max_delivery_attempts: int = 5,
+) -> "gapic_types.Subscription":
     """Update a subscription's dead letter policy."""
     # [START pubsub_dead_letter_update_subscription]
     from google.cloud import pubsub_v1
@@ -235,41 +700,55 @@ def update_subscription_with_dead_letter_policy(
     # TODO(developer): This is an existing dead letter topic that the subscription
     # with dead letter policy will forward dead letter messages to.
     # dead_letter_topic_id = "your-dead-letter-topic-id"
+    # TODO(developer): This is the maximum number of delivery attempts allowed
+    # for a message before it gets delivered to a dead letter topic.
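+    # Valid values for max_delivery_attempts are between 5 and 100.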
+    # max_delivery_attempts = 5

+    publisher = pubsub_v1.PublisherClient()
     subscriber = pubsub_v1.SubscriberClient()
-    topic_path = subscriber.topic_path(project_id, topic_id)
+
+    topic_path = publisher.topic_path(project_id, topic_id)
     subscription_path = subscriber.subscription_path(project_id, subscription_id)
-    dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id)
+    dead_letter_topic_path = publisher.topic_path(project_id, dead_letter_topic_id)

-    subscription_before_update = subscriber.get_subscription(subscription_path)
-    print("Before the update: {}".format(subscription_before_update))
+    subscription_before_update = subscriber.get_subscription(
+        request={"subscription": subscription_path}
+    )
+    print(f"Before the update: {subscription_before_update}.")

     # Indicates which fields in the provided subscription to update.
-    update_mask = FieldMask(paths=["dead_letter_policy.max_delivery_attempts"])
+    update_mask = FieldMask(paths=["dead_letter_policy"])

     # Construct a dead letter policy you expect to have after the update.
     dead_letter_policy = DeadLetterPolicy(
-        dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=20
+        dead_letter_topic=dead_letter_topic_path,
+        max_delivery_attempts=max_delivery_attempts,
     )

     # Construct the subscription with the dead letter policy you expect to have
     # after the update. Here, values in the required fields (name, topic) help
     # identify the subscription.
     subscription = pubsub_v1.types.Subscription(
-        name=subscription_path, topic=topic_path, dead_letter_policy=dead_letter_policy,
+        name=subscription_path,
+        topic=topic_path,
+        dead_letter_policy=dead_letter_policy,
     )

     with subscriber:
-        subscription_after_update = subscriber.update_subscription(
-            subscription, update_mask
+        subscription_after_update: gapic_types.Subscription = (
+            subscriber.update_subscription(
+                request={"subscription": subscription, "update_mask": update_mask}
+            )
         )

-    print("After the update: {}".format(subscription_after_update))
+    print(f"After the update: {subscription_after_update}.")
     # [END pubsub_dead_letter_update_subscription]
     return subscription_after_update


-def remove_dead_letter_policy(project_id, topic_id, subscription_id):
+def remove_dead_letter_policy(
+    project_id: str, topic_id: str, subscription_id: str
+) -> "gapic_types.Subscription":
     """Remove dead letter policy from a subscription."""
     # [START pubsub_dead_letter_remove]
     from google.cloud import pubsub_v1
@@ -283,20 +762,18 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id):
     # TODO(developer): This is an existing subscription with a dead letter policy.
     # subscription_id = "your-subscription-id"

+    publisher = pubsub_v1.PublisherClient()
     subscriber = pubsub_v1.SubscriberClient()
-    topic_path = subscriber.topic_path(project_id, topic_id)
+    topic_path = publisher.topic_path(project_id, topic_id)
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

-    subscription_before_update = subscriber.get_subscription(subscription_path)
-    print("Before removing the policy: {}".format(subscription_before_update))
+    subscription_before_update = subscriber.get_subscription(
+        request={"subscription": subscription_path}
+    )
+    print(f"Before removing the policy: {subscription_before_update}.")

     # Indicates which fields in the provided subscription to update.
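+    # Because the replacement subscription below leaves dead_letter_policy
+    # unset, masking the whole field clears the policy on the server.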
-    update_mask = FieldMask(
-        paths=[
-            "dead_letter_policy.dead_letter_topic",
-            "dead_letter_policy.max_delivery_attempts",
-        ]
-    )
+    update_mask = FieldMask(paths=["dead_letter_policy"])

     # Construct the subscription (without any dead letter policy) that you
     # expect to have after the update.
@@ -305,16 +782,20 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id):
     )

     with subscriber:
-        subscription_after_update = subscriber.update_subscription(
-            subscription, update_mask
+        subscription_after_update: gapic_types.Subscription = (
+            subscriber.update_subscription(
+                request={"subscription": subscription, "update_mask": update_mask}
+            )
         )

-    print("After removing the policy: {}".format(subscription_after_update))
+    print(f"After removing the policy: {subscription_after_update}.")
     # [END pubsub_dead_letter_remove]
     return subscription_after_update


-def receive_messages(project_id, subscription_id, timeout=None):
+def receive_messages(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
     """Receives messages from a pull subscription."""
     # [START pubsub_subscriber_async_pull]
     # [START pubsub_quickstart_subscriber]
@@ -332,12 +813,12 @@ def receive_messages(project_id, subscription_id, timeout=None):
     # in the form `projects/{project_id}/subscriptions/{subscription_id}`
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

-    def callback(message):
-        print("Received message: {}".format(message))
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message}.")
         message.ack()

     streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
-    print("Listening for messages on {}..\n".format(subscription_path))
+    print(f"Listening for messages on {subscription_path}..\n")

     # Wrap subscriber in a 'with' block to automatically call close() when done.
     with subscriber:
@@ -346,12 +827,15 @@ def callback(message):
             # unless an exception is encountered first.
             streaming_pull_future.result(timeout=timeout)
         except TimeoutError:
-            streaming_pull_future.cancel()
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
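+            # Any messages pulled but not yet acked when the stream closes are
+            # redelivered once their ack deadline expires.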
     # [END pubsub_subscriber_async_pull]
     # [END pubsub_quickstart_subscriber]


-def receive_messages_with_custom_attributes(project_id, subscription_id, timeout=None):
+def receive_messages_with_custom_attributes(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
     """Receives messages from a pull subscription."""
     # [START pubsub_subscriber_async_pull_custom_attributes]
     from concurrent.futures import TimeoutError
@@ -366,17 +850,17 @@ def receive_messages_with_custom_attributes(project_id, subscription_id, timeout
     subscriber = pubsub_v1.SubscriberClient()
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

-    def callback(message):
-        print("Received message: {}".format(message.data))
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message.data!r}.")
         if message.attributes:
             print("Attributes:")
             for key in message.attributes:
                 value = message.attributes.get(key)
-                print("{}: {}".format(key, value))
+                print(f"{key}: {value}")
         message.ack()

     streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
-    print("Listening for messages on {}..\n".format(subscription_path))
+    print(f"Listening for messages on {subscription_path}..\n")

     # Wrap subscriber in a 'with' block to automatically call close() when done.
     with subscriber:
@@ -385,11 +869,14 @@ def callback(message):
             # unless an exception is encountered first.
             streaming_pull_future.result(timeout=timeout)
         except TimeoutError:
-            streaming_pull_future.cancel()
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
     # [END pubsub_subscriber_async_pull_custom_attributes]


-def receive_messages_with_flow_control(project_id, subscription_id, timeout=None):
+def receive_messages_with_flow_control(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
     """Receives messages from a pull subscription with flow control."""
     # [START pubsub_subscriber_flow_settings]
     from concurrent.futures import TimeoutError
@@ -404,8 +891,8 @@ def receive_messages_with_flow_control(project_id, subscription_id, timeout=None
     subscriber = pubsub_v1.SubscriberClient()
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

-    def callback(message):
-        print("Received message: {}".format(message.data))
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message.data!r}.")
         message.ack()

     # Limit the subscriber to only have ten outstanding messages at a time.
@@ -414,7 +901,7 @@ def callback(message):
     streaming_pull_future = subscriber.subscribe(
         subscription_path, callback=callback, flow_control=flow_control
     )
-    print("Listening for messages on {}..\n".format(subscription_path))
+    print(f"Listening for messages on {subscription_path}..\n")

     # Wrap subscriber in a 'with' block to automatically call close() when done.
     with subscriber:
@@ -423,13 +910,121 @@ def callback(message):
             # unless an exception is encountered first.
             streaming_pull_future.result(timeout=timeout)
         except TimeoutError:
-            streaming_pull_future.cancel()
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
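+            # While the stream is open, delivery pauses whenever the limit of
+            # ten outstanding messages set above is reached.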
     # [END pubsub_subscriber_flow_settings]


-def synchronous_pull(project_id, subscription_id):
+def receive_messages_with_blocking_shutdown(
+    project_id: str, subscription_id: str, timeout: float = 5.0
+) -> None:
+    """Shuts down a pull subscription by awaiting message callbacks to complete."""
+    # [START pubsub_subscriber_blocking_shutdown]
+    import time
+    from concurrent.futures import TimeoutError
+    from google.cloud import pubsub_v1
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # subscription_id = "your-subscription-id"
+    # Number of seconds the subscriber should listen for messages
+    # timeout = 5.0
+
+    subscriber = pubsub_v1.SubscriberClient()
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message.data!r}.")
+        time.sleep(timeout + 3.0)  # Process longer than streaming pull future timeout.
+        message.ack()
+        print(f"Done processing the message {message.data!r}.")
+
+    streaming_pull_future = subscriber.subscribe(
+        subscription_path,
+        callback=callback,
+        await_callbacks_on_shutdown=True,
+    )
+    print(f"Listening for messages on {subscription_path}..\n")
+
+    # Wrap subscriber in a 'with' block to automatically call close() when done.
+    with subscriber:
+        try:
+            # When `timeout` is not set, result() will block indefinitely,
+            # unless an exception is encountered first.
+            streaming_pull_future.result(timeout=timeout)
+        except TimeoutError:
+            streaming_pull_future.cancel()
+            print("Streaming pull future canceled.")
+            streaming_pull_future.result()  # Blocks until shutdown complete.
+            print("Done waiting for the stream shutdown.")
+
+    # The "Done waiting..." message is only printed *after* the processing of all
+    # received messages has completed.
+    # [END pubsub_subscriber_blocking_shutdown]
+
+
+def receive_messages_with_exactly_once_delivery_enabled(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
+    """Receives messages from a pull subscription with exactly-once delivery enabled.
+    This is a preview feature. For more details, see:
+    https://cloud.google.com/pubsub/docs/exactly-once-delivery
+    """
+    # [START pubsub_subscriber_exactly_once]
+    from concurrent.futures import TimeoutError
+    from google.cloud import pubsub_v1
+    from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # subscription_id = "your-subscription-id"
+    # Number of seconds the subscriber should listen for messages
+    # timeout = 5.0
+
+    subscriber = pubsub_v1.SubscriberClient()
+    # The `subscription_path` method creates a fully qualified identifier
+    # in the form `projects/{project_id}/subscriptions/{subscription_id}`
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)
+
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message}.")
+
+        # Use `ack_with_response()` instead of `ack()` to get a future that tracks
+        # the result of the acknowledge call. When exactly-once delivery is enabled
+        # on the subscription, the message is guaranteed to not be delivered again
+        # if the ack future succeeds.
+        ack_future = message.ack_with_response()
+
+        try:
+            # Block on result of acknowledge call.
+            # When `timeout` is not set, result() will block indefinitely,
+            # unless an exception is encountered first.
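+            # On success, exactly-once delivery guarantees no redelivery; on
+            # failure, AcknowledgeError.error_code describes what went wrong.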
+            ack_future.result(timeout=timeout)
+            print(f"Ack for message {message.message_id} successful.")
+        except sub_exceptions.AcknowledgeError as e:
+            print(
+                f"Ack for message {message.message_id} failed with error: {e.error_code}"
+            )
+
+    streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
+    print(f"Listening for messages on {subscription_path}..\n")
+
+    # Wrap subscriber in a 'with' block to automatically call close() when done.
+    with subscriber:
+        try:
+            # When `timeout` is not set, result() will block indefinitely,
+            # unless an exception is encountered first.
+            streaming_pull_future.result(timeout=timeout)
+        except TimeoutError:
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
+    # [END pubsub_subscriber_exactly_once]
+
+
+def synchronous_pull(project_id: str, subscription_id: str) -> None:
     """Pulling messages synchronously."""
     # [START pubsub_subscriber_sync_pull]
+    from google.api_core import retry
     from google.cloud import pubsub_v1

     # TODO(developer)
@@ -444,35 +1039,50 @@ def synchronous_pull(project_id, subscription_id):
     # Wrap the subscriber in a 'with' block to automatically call close() to
     # close the underlying gRPC channel when done.
     with subscriber:
-        # The subscriber pulls a specific number of messages.
-        response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES)
+        # The subscriber pulls a specific number of messages. The actual
+        # number of messages pulled may be smaller than max_messages.
+        response = subscriber.pull(
+            request={"subscription": subscription_path, "max_messages": NUM_MESSAGES},
+            retry=retry.Retry(deadline=300),
+        )
+
+        if len(response.received_messages) == 0:
+            return

         ack_ids = []
         for received_message in response.received_messages:
-            print("Received: {}".format(received_message.message.data))
+            print(f"Received: {received_message.message.data}.")
             ack_ids.append(received_message.ack_id)

         # Acknowledges the received messages so they will not be sent again.
-        subscriber.acknowledge(subscription_path, ack_ids)
+        subscriber.acknowledge(
+            request={"subscription": subscription_path, "ack_ids": ack_ids}
+        )

         print(
-            "Received and acknowledged {} messages. Done.".format(
-                len(response.received_messages)
-            )
+            f"Received and acknowledged {len(response.received_messages)} messages from {subscription_path}."
         )
     # [END pubsub_subscriber_sync_pull]


-def synchronous_pull_with_lease_management(project_id, subscription_id):
+def synchronous_pull_with_lease_management(
+    project_id: str, subscription_id: str
+) -> None:
     """Pulling messages synchronously with lease management"""
     # [START pubsub_subscriber_sync_pull_with_lease]
     import logging
     import multiprocessing
-    import random
+    import sys
     import time

+    from google.api_core import retry
     from google.cloud import pubsub_v1

+    multiprocessing.log_to_stderr()
+    logger = multiprocessing.get_logger()
+    logger.setLevel(logging.INFO)
+    processes = dict()
+
     # TODO(developer)
     # project_id = "your-project-id"
     # subscription_id = "your-subscription-id"
@@ -480,69 +1090,50 @@ def synchronous_pull_with_lease_management(project_id, subscription_id):
     subscriber = pubsub_v1.SubscriberClient()
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

-    NUM_MESSAGES = 2
-    ACK_DEADLINE = 30
-    SLEEP_TIME = 10
-
-    # The subscriber pulls a specific number of messages.
-    response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES)
-
-    multiprocessing.log_to_stderr()
-    logger = multiprocessing.get_logger()
-    logger.setLevel(logging.INFO)
+    response = subscriber.pull(
+        request={"subscription": subscription_path, "max_messages": 3},
+        retry=retry.Retry(deadline=300),
+    )

-    def worker(msg):
-        """Simulates a long-running process."""
-        RUN_TIME = random.randint(1, 60)
-        logger.info(
-            "{}: Running {} for {}s".format(
-                time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME
-            )
-        )
-        time.sleep(RUN_TIME)
+    if len(response.received_messages) == 0:
+        return

-    # `processes` stores process as key and ack id and message as values.
-    processes = dict()
+    # Start a process for each message based on its size modulo 10.
     for message in response.received_messages:
-        process = multiprocessing.Process(target=worker, args=(message,))
+        process = multiprocessing.Process(
+            target=time.sleep, args=(sys.getsizeof(message) % 10,)
+        )
         processes[process] = (message.ack_id, message.message.data)
         process.start()

     while processes:
+        # Take a break every second.
+        if processes:
+            time.sleep(1)
+
         for process in list(processes):
             ack_id, msg_data = processes[process]
-            # If the process is still running, reset the ack deadline as
-            # specified by ACK_DEADLINE once every while as specified
-            # by SLEEP_TIME.
+            # If the process is running, reset the ack deadline.
             if process.is_alive():
-                # `ack_deadline_seconds` must be between 10 to 600.
                 subscriber.modify_ack_deadline(
-                    subscription_path, [ack_id], ack_deadline_seconds=ACK_DEADLINE,
-                )
-                logger.info(
-                    "{}: Reset ack deadline for {} for {}s".format(
-                        time.strftime("%X", time.gmtime()), msg_data, ACK_DEADLINE,
-                    )
+                    request={
+                        "subscription": subscription_path,
+                        "ack_ids": [ack_id],
+                        # Must be between 10 and 600.
+                        "ack_deadline_seconds": 15,
+                    }
                 )
+                logger.debug(f"Reset ack deadline for {msg_data}.")

-            # If the processs is finished, acknowledges using `ack_id`.
+            # If the process is complete, acknowledge the message.
             else:
-                subscriber.acknowledge(subscription_path, [ack_id])
-                logger.info(
-                    "{}: Acknowledged {}".format(
-                        time.strftime("%X", time.gmtime()), msg_data
-                    )
+                subscriber.acknowledge(
+                    request={"subscription": subscription_path, "ack_ids": [ack_id]}
                 )
+                logger.debug(f"Acknowledged {msg_data}.")
                 processes.pop(process)
-
-        # If there are still processes running, sleeps the thread.
-        if processes:
-            time.sleep(SLEEP_TIME)
-
     print(
-        "Received and acknowledged {} messages. Done.".format(
-            len(response.received_messages)
-        )
+        f"Received and acknowledged {len(response.received_messages)} messages from {subscription_path}."
     )

     # Close the underlying gRPC channel. Alternatively, wrap subscriber in
@@ -551,7 +1142,9 @@ def worker(msg):
     # [END pubsub_subscriber_sync_pull_with_lease]


-def listen_for_errors(project_id, subscription_id, timeout=None):
+def listen_for_errors(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
     """Receives messages and catches errors from a pull subscription."""
     # [START pubsub_subscriber_error_listener]
     from google.cloud import pubsub_v1
@@ -565,12 +1158,12 @@ def listen_for_errors(project_id, subscription_id, timeout=None):
     subscriber = pubsub_v1.SubscriberClient()
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

-    def callback(message):
-        print("Received message: {}".format(message))
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message}.")
         message.ack()

     streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
-    print("Listening for messages on {}..\n".format(subscription_path))
+    print(f"Listening for messages on {subscription_path}..\n")

     # Wrap subscriber in a 'with' block to automatically call close() when done.
     with subscriber:
@@ -579,17 +1172,18 @@ def callback(message):
         try:
             streaming_pull_future.result(timeout=timeout)
         except Exception as e:
-            streaming_pull_future.cancel()
             print(
-                "Listening for messages on {} threw an exception: {}.".format(
-                    subscription_id, e
-                )
+                f"Listening for messages on {subscription_path} threw an exception: {e}."
             )
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
     # [END pubsub_subscriber_error_listener]


-def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout=None):
-    # [START pubsub_dead_letter_delivery_attempt]
+def receive_messages_with_delivery_attempts(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
+    # [START pubsub_dead_letter_delivery_attempt]
     from concurrent.futures import TimeoutError
     from google.cloud import pubsub_v1

@@ -600,13 +1194,13 @@ def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout
     subscriber = pubsub_v1.SubscriberClient()
     subscription_path = subscriber.subscription_path(project_id, subscription_id)

-    def callback(message):
-        print("Received message: {}".format(message))
-        print("With delivery attempts: {}".format(message.delivery_attempt))
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message}.")
+        print(f"With delivery attempts: {message.delivery_attempt}.")
         message.ack()

     streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback)
-    print("Listening for messages on {}..\n".format(subscription_path))
+    print(f"Listening for messages on {subscription_path}..\n")

     # Wrap subscriber in a 'with' block to automatically call close() when done.
     with subscriber:
@@ -615,13 +1209,58 @@ def callback(message):
         try:
             streaming_pull_future.result(timeout=timeout)
         except TimeoutError:
-            streaming_pull_future.cancel()
-    # [END pubsub_dead_letter_delivery_attempt]
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
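+            # Note: message.delivery_attempt is populated only for subscriptions
+            # with a dead letter policy; it is None otherwise.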
+    # [END pubsub_dead_letter_delivery_attempt]
+
+
+def receive_messages_with_concurrency_control(
+    project_id: str, subscription_id: str, timeout: Optional[float] = None
+) -> None:
+    # [START pubsub_subscriber_concurrency_control]
+    from concurrent import futures
+    from google.cloud import pubsub_v1
+
+    # TODO(developer)
+    # project_id = "your-project-id"
+    # subscription_id = "your-subscription-id"
+    # Number of seconds the subscriber should listen for messages
+    # timeout = 5.0
+
+    # An optional executor to use. If not specified, a default one with maximum 10
+    # threads will be created.
+    executor = futures.ThreadPoolExecutor(max_workers=5)
+    # A thread pool-based scheduler. It must not be shared across SubscriberClients.
+    scheduler = pubsub_v1.subscriber.scheduler.ThreadScheduler(executor)
+    subscriber = pubsub_v1.SubscriberClient()
+    subscription_path = subscriber.subscription_path(project_id, subscription_id)

-if __name__ == "__main__":
+    def callback(message: pubsub_v1.subscriber.message.Message) -> None:
+        print(f"Received {message.data!r}.")
+        message.ack()
+
+    streaming_pull_future = subscriber.subscribe(
+        subscription_path, callback=callback, scheduler=scheduler
+    )
+    print(f"Listening for messages on {subscription_path}..\n")
+
+    # Wrap subscriber in a 'with' block to automatically call close() when done.
+    with subscriber:
+        try:
+            # When `timeout` is not set, result() will block indefinitely,
+            # unless an exception is encountered first.
+            streaming_pull_future.result(timeout=timeout)
+        except futures.TimeoutError:
+            streaming_pull_future.cancel()  # Trigger the shutdown.
+            streaming_pull_future.result()  # Block until the shutdown is complete.
+    # [END pubsub_subscriber_concurrency_control]
+
+
+if __name__ == "__main__":  # noqa
     parser = argparse.ArgumentParser(
-        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter,
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
     )

     parser.add_argument("project_id", help="Your Google Cloud project ID")
@@ -635,6 +1274,14 @@ def callback(message):
         "list-in-project", help=list_subscriptions_in_project.__doc__
     )

+    otel_subscribe_parse = subparsers.add_parser(
+        "otel-subscribe", help=pubsub_subscribe_otel_tracing.__doc__
+    )
+    otel_subscribe_parse.add_argument("subscription_project_id")
+    otel_subscribe_parse.add_argument("cloud_trace_project_id")
+    otel_subscribe_parse.add_argument("subscription_id")
+    otel_subscribe_parse.add_argument("timeout", default=None, type=float, nargs="?")
+
     create_parser = subparsers.add_parser("create", help=create_subscription.__doc__)
     create_parser.add_argument("topic_id")
     create_parser.add_argument("subscription_id")
@@ -646,6 +1293,9 @@ def callback(message):
     create_with_dead_letter_policy_parser.add_argument("topic_id")
     create_with_dead_letter_policy_parser.add_argument("subscription_id")
     create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_id")
+    create_with_dead_letter_policy_parser.add_argument(
+        "max_delivery_attempts", type=int, nargs="?", default=5
+    )

     create_push_parser = subparsers.add_parser(
         "create-push", help=create_push_subscription.__doc__
@@ -654,6 +1304,57 @@ def callback(message):
     create_push_parser.add_argument("subscription_id")
     create_push_parser.add_argument("endpoint")

+    create_push_no_wrapper_parser = subparsers.add_parser(
+        "create-push-no-wrapper", help=create_push_no_wrapper_subscription.__doc__
+    )
+    create_push_no_wrapper_parser.add_argument("topic_id")
+    create_push_no_wrapper_parser.add_argument("subscription_id")
create_push_no_wrapper_parser.add_argument("endpoint") + + create_subscription_with_ordering_parser = subparsers.add_parser( + "create-with-ordering", help=create_subscription_with_ordering.__doc__ + ) + create_subscription_with_ordering_parser.add_argument("topic_id") + create_subscription_with_ordering_parser.add_argument("subscription_id") + + create_subscription_with_filtering_parser = subparsers.add_parser( + "create-with-filtering", help=create_subscription_with_filtering.__doc__ + ) + create_subscription_with_filtering_parser.add_argument("topic_id") + create_subscription_with_filtering_parser.add_argument("subscription_id") + create_subscription_with_filtering_parser.add_argument("filter") + + create_subscription_with_exactly_once_delivery_parser = subparsers.add_parser( + "create-with-exactly-once", + help=create_subscription_with_exactly_once_delivery.__doc__, + ) + create_subscription_with_exactly_once_delivery_parser.add_argument("topic_id") + create_subscription_with_exactly_once_delivery_parser.add_argument( + "subscription_id" + ) + + create_bigquery_subscription_parser = subparsers.add_parser( + "create-biquery", + help=create_bigquery_subscription.__doc__, + ) + create_bigquery_subscription_parser.add_argument("topic_id") + create_bigquery_subscription_parser.add_argument("subscription_id") + create_bigquery_subscription_parser.add_argument("bigquery_table_id") + + create_cloudstorage_subscription_parser = subparsers.add_parser( + "create-cloudstorage", + help=create_cloudstorage_subscription.__doc__, + ) + create_cloudstorage_subscription_parser.add_argument("topic_id") + create_cloudstorage_subscription_parser.add_argument("subscription_id") + create_cloudstorage_subscription_parser.add_argument("bucket") + + create_subscription_with_smt_parser = subparsers.add_parser( + "create-with-smt", help=create_subscription_with_smt.__doc__ + ) + create_subscription_with_smt_parser.add_argument("topic_id") + create_subscription_with_smt_parser.add_argument("subscription_id") + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) delete_parser.add_argument("subscription_id") @@ -671,6 +1372,9 @@ def callback(message): update_dead_letter_policy_parser.add_argument("topic_id") update_dead_letter_policy_parser.add_argument("subscription_id") update_dead_letter_policy_parser.add_argument("dead_letter_topic_id") + update_dead_letter_policy_parser.add_argument( + "max_delivery_attempts", type=int, nargs="?", default=5 + ) remove_dead_letter_policy_parser = subparsers.add_parser( "remove-dead-letter-policy", help=remove_dead_letter_policy.__doc__ @@ -678,6 +1382,15 @@ def callback(message): remove_dead_letter_policy_parser.add_argument("topic_id") remove_dead_letter_policy_parser.add_argument("subscription_id") + optimistic_subscribe_parser = subparsers.add_parser( + "optimistic-subscribe", help=optimistic_subscribe.__doc__ + ) + optimistic_subscribe_parser.add_argument("topic_id") + optimistic_subscribe_parser.add_argument("subscription_id") + optimistic_subscribe_parser.add_argument( + "timeout", default=None, type=float, nargs="?" + ) + receive_parser = subparsers.add_parser("receive", help=receive_messages.__doc__) receive_parser.add_argument("subscription_id") receive_parser.add_argument("timeout", default=None, type=float, nargs="?") @@ -699,6 +1412,26 @@ def callback(message): "timeout", default=None, type=float, nargs="?" 
     )

+    receive_with_blocking_shutdown_parser = subparsers.add_parser(
+        "receive-blocking-shutdown",
+        help=receive_messages_with_blocking_shutdown.__doc__,
+    )
+    receive_with_blocking_shutdown_parser.add_argument("subscription_id")
+    receive_with_blocking_shutdown_parser.add_argument(
+        "timeout", default=None, type=float, nargs="?"
+    )
+
+    receive_messages_with_exactly_once_delivery_enabled_parser = subparsers.add_parser(
+        "receive-messages-with-exactly-once-delivery-enabled",
+        help=receive_messages_with_exactly_once_delivery_enabled.__doc__,
+    )
+    receive_messages_with_exactly_once_delivery_enabled_parser.add_argument(
+        "subscription_id"
+    )
+    receive_messages_with_exactly_once_delivery_enabled_parser.add_argument(
+        "timeout", default=None, type=float, nargs="?"
+    )
+
     synchronous_pull_parser = subparsers.add_parser(
         "receive-synchronously", help=synchronous_pull.__doc__
     )
@@ -727,6 +1460,15 @@ def callback(message):
         "timeout", default=None, type=float, nargs="?"
     )

+    receive_messages_with_concurrency_control_parser = subparsers.add_parser(
+        "receive-messages-with-concurrency-control",
+        help=receive_messages_with_concurrency_control.__doc__,
+    )
+    receive_messages_with_concurrency_control_parser.add_argument("subscription_id")
+    receive_messages_with_concurrency_control_parser.add_argument(
+        "timeout", default=None, type=float, nargs="?"
+    )
+
     args = parser.parse_args()

     if args.command == "list-in-topic":
@@ -741,16 +1483,49 @@ def callback(message):
             args.topic_id,
             args.subscription_id,
             args.dead_letter_topic_id,
+            args.max_delivery_attempts,
         )
     elif args.command == "create-push":
         create_push_subscription(
-            args.project_id, args.topic_id, args.subscription_id, args.endpoint,
+            args.project_id, args.topic_id, args.subscription_id, args.endpoint
         )
+    elif args.command == "create-push-no-wrapper":
+        create_push_no_wrapper_subscription(
+            args.project_id, args.topic_id, args.subscription_id, args.endpoint
+        )
+    elif args.command == "create-with-ordering":
+        create_subscription_with_ordering(
+            args.project_id, args.topic_id, args.subscription_id
+        )
+    elif args.command == "create-with-filtering":
+        create_subscription_with_filtering(
+            args.project_id, args.topic_id, args.subscription_id, args.filter
+        )
+    elif args.command == "create-with-exactly-once":
+        create_subscription_with_exactly_once_delivery(
+            args.project_id, args.topic_id, args.subscription_id
+        )
+    elif args.command == "create-bigquery":
+        create_bigquery_subscription(
+            args.project_id,
+            args.topic_id,
+            args.subscription_id,
+            args.bigquery_table_id,
+        )
+    elif args.command == "create-cloudstorage":
+        create_cloudstorage_subscription(
+            args.project_id, args.topic_id, args.subscription_id, args.bucket
+        )
+    elif args.command == "create-with-smt":
+        create_subscription_with_smt(
+            args.project_id, args.topic_id, args.subscription_id
+        )
+
     elif args.command == "delete":
         delete_subscription(args.project_id, args.subscription_id)
     elif args.command == "update-push":
         update_push_subscription(
-            args.project_id, args.topic_id, args.subscription_id, args.endpoint,
+            args.project_id, args.topic_id, args.subscription_id, args.endpoint
         )
     elif args.command == "update-dead-letter-policy":
         update_subscription_with_dead_letter_policy(
@@ -758,9 +1533,14 @@ def callback(message):
             args.topic_id,
             args.subscription_id,
             args.dead_letter_topic_id,
+            args.max_delivery_attempts,
         )
     elif args.command == "remove-dead-letter-policy":
         remove_dead_letter_policy(args.project_id, args.topic_id, args.subscription_id)
+    elif args.command == "optimistic-subscribe":
"optimistic-subscribe": + optimistic_subscribe( + args.project_id, args.topic_id, args.subscription_id, args.timeout + ) elif args.command == "receive": receive_messages(args.project_id, args.subscription_id, args.timeout) elif args.command == "receive-custom-attributes": @@ -771,6 +1551,14 @@ def callback(message): receive_messages_with_flow_control( args.project_id, args.subscription_id, args.timeout ) + elif args.command == "receive-blocking-shutdown": + receive_messages_with_blocking_shutdown( + args.project_id, args.subscription_id, args.timeout + ) + elif args.command == "receive-messages-with-exactly-once-delivery-enabled": + receive_messages_with_exactly_once_delivery_enabled( + args.project_id, args.subscription_id, args.timeout + ) elif args.command == "receive-synchronously": synchronous_pull(args.project_id, args.subscription_id) elif args.command == "receive-synchronously-with-lease": @@ -781,3 +1569,14 @@ def callback(message): receive_messages_with_delivery_attempts( args.project_id, args.subscription_id, args.timeout ) + elif args.command == "receive-messages-with-concurrency-control": + receive_messages_with_concurrency_control( + args.project_id, args.subscription_id, args.timeout + ) + elif args.command == "otel-subscribe": + pubsub_subscribe_otel_tracing( + args.subscription_project_id, + args.cloud_trace_project_id, + args.subscription_id, + args.timeout, + ) diff --git a/samples/snippets/subscriber_test.py b/samples/snippets/subscriber_test.py index a7f7c139c..53a844e01 100644 --- a/samples/snippets/subscriber_test.py +++ b/samples/snippets/subscriber_test.py @@ -13,329 +13,1153 @@ # limitations under the License. import os +import re +import sys +import time +from typing import Any, Callable, cast, Generator, List, TypeVar import uuid +from _pytest.capture import CaptureFixture import backoff -from google.cloud import pubsub_v1 +from flaky import flaky +from google.api_core.exceptions import NotFound +from google.cloud import bigquery, pubsub_v1, storage import pytest import subscriber +# This uuid is shared across tests which run in parallel. 
 UUID = uuid.uuid4().hex
-PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"]
-TOPIC = "subscription-test-topic-" + UUID
-DEAD_LETTER_TOPIC = "subscription-test-dead-letter-topic-" + UUID
-SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID
-SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID
-SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID
-SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID
-ENDPOINT = "https://{}.appspot.com/push".format(PROJECT)
-NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT)
+PY_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}"
+UNDERSCORE_PY_VERSION = PY_VERSION.replace(".", "_")
+PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
+TOPIC = f"subscription-test-topic-{PY_VERSION}-{UUID}"
+DEAD_LETTER_TOPIC = f"subscription-test-dead-letter-topic-{PY_VERSION}-{UUID}"
+UNUSED_TOPIC = f"subscription-unused-topic-{PY_VERSION}-{UUID}"
+EOD_TOPIC = f"subscription-test-eod-topic-{PY_VERSION}-{UUID}"
+SUBSCRIPTION_ADMIN = f"subscription-test-subscription-admin-{PY_VERSION}-{UUID}"
+ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push"
+NEW_ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push2"
+REGIONAL_ENDPOINT = "us-east1-pubsub.googleapis.com:443"
+DEFAULT_MAX_DELIVERY_ATTEMPTS = 5
+UPDATED_MAX_DELIVERY_ATTEMPTS = 20
+FILTER = 'attributes.author="unknown"'
+BIGQUERY_DATASET_ID = f"python_samples_dataset_{UNDERSCORE_PY_VERSION}_{UUID}"
+BIGQUERY_TABLE_ID = f"python_samples_table_{UNDERSCORE_PY_VERSION}_{UUID}"
+CLOUDSTORAGE_BUCKET = f"python_samples_bucket_{UNDERSCORE_PY_VERSION}_{UUID}"
+
+C = TypeVar("C", bound=Callable[..., Any])
+
+typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1))
+
+# These tests run in parallel if pytest-parallel is installed.
+# Avoid modifying resources that are shared across tests,
+# as this results in test flake.
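+# The module-scoped fixtures below create each shared resource once per
+# test module and delete it again at teardown.
+
+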
 @pytest.fixture(scope="module")
-def publisher_client():
+def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]:
     yield pubsub_v1.PublisherClient()


 @pytest.fixture(scope="module")
-def topic(publisher_client):
-    topic_path = publisher_client.topic_path(PROJECT, TOPIC)
+def regional_publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]:
+    client_options = {"api_endpoint": REGIONAL_ENDPOINT}
+    publisher = pubsub_v1.PublisherClient(client_options=client_options)
+    yield publisher
+
+
+@pytest.fixture(scope="module")
+def subscription_admin(
+    subscriber_client: pubsub_v1.SubscriberClient, topic: str
+) -> Generator[str, None, None]:
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, SUBSCRIPTION_ADMIN
+    )
+
+    try:
+        subscription = subscriber_client.get_subscription(
+            request={"subscription": subscription_path}
+        )
+    except NotFound:
+        subscription = subscriber_client.create_subscription(
+            request={"name": subscription_path, "topic": topic}
+        )
+
+    yield subscription.name
+
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
+
+
+@pytest.fixture(scope="module")
+def topic(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, None]:
+    topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC)

     try:
-        topic = publisher_client.get_topic(topic_path)
+        topic = publisher_client.get_topic(request={"topic": topic_path})
     except:  # noqa
-        topic = publisher_client.create_topic(topic_path)
+        topic = publisher_client.create_topic(request={"name": topic_path})

     yield topic.name

-    publisher_client.delete_topic(topic.name)
+    publisher_client.delete_topic(request={"topic": topic.name})


+# This topic is only for creating subscriptions; no messages should be published on it.
 @pytest.fixture(scope="module")
-def dead_letter_topic(publisher_client):
-    topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC)
+def unused_topic(
+    publisher_client: pubsub_v1.PublisherClient,
+) -> Generator[str, None, None]:
+    topic_path = publisher_client.topic_path(PROJECT_ID, UNUSED_TOPIC)

     try:
-        dead_letter_topic = publisher_client.get_topic(topic_path)
+        topic = publisher_client.get_topic(request={"topic": topic_path})
     except:  # noqa
-        dead_letter_topic = publisher_client.create_topic(topic_path)
+        topic = publisher_client.create_topic(request={"name": topic_path})
+
+    yield topic.name
+
+    publisher_client.delete_topic(request={"topic": topic.name})
+
+
+@pytest.fixture(scope="module")
+def dead_letter_topic(
+    publisher_client: pubsub_v1.PublisherClient,
+) -> Generator[str, None, None]:
+    topic_path = publisher_client.topic_path(PROJECT_ID, DEAD_LETTER_TOPIC)
+
+    try:
+        dead_letter_topic = publisher_client.get_topic(request={"topic": topic_path})
+    except NotFound:
+        dead_letter_topic = publisher_client.create_topic(request={"name": topic_path})

     yield dead_letter_topic.name

-    publisher_client.delete_topic(dead_letter_topic.name)
+    publisher_client.delete_topic(request={"topic": dead_letter_topic.name})


 @pytest.fixture(scope="module")
-def subscriber_client():
+def exactly_once_delivery_topic(
+    publisher_client: pubsub_v1.PublisherClient,
+) -> Generator[str, None, None]:
+    topic_path = publisher_client.topic_path(PROJECT_ID, EOD_TOPIC)
+
+    try:
+        topic = publisher_client.get_topic(request={"topic": topic_path})
+    except NotFound:
+        topic = publisher_client.create_topic(request={"name": topic_path})
+
+    yield topic.name
+
+    publisher_client.delete_topic(request={"topic": topic.name})
+
+
+@pytest.fixture(scope="module")
+def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]:
     subscriber_client = pubsub_v1.SubscriberClient()
     yield subscriber_client
     subscriber_client.close()


-@pytest.fixture(scope="module")
-def subscription_admin(subscriber_client, topic):
-    subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN)
+def _publish_messages(
+    publisher_client: pubsub_v1.PublisherClient,
+    topic: str,
+    message_num: int = 5,
+    **attrs: Any,  # noqa: ANN401
+) -> List[str]:
+    message_ids = []
+    for n in range(message_num):
+        data = f"message {n}".encode("utf-8")
+        publish_future = publisher_client.publish(topic, data, **attrs)
+        message_ids.append(publish_future.result())
+    return message_ids
+
+
+def test_list_in_topic(subscription_admin: str, capsys: CaptureFixture[str]) -> None:
+    typed_backoff = cast(
+        Callable[[C], C],
+        backoff.on_exception(backoff.expo, AssertionError, max_time=60),
+    )
+
+    @typed_backoff
+    def eventually_consistent_test() -> None:
+        subscriber.list_subscriptions_in_topic(PROJECT_ID, TOPIC)
+        out, _ = capsys.readouterr()
+        assert subscription_admin in out
+
+    eventually_consistent_test()
+
+
+def test_list_in_project(subscription_admin: str, capsys: CaptureFixture[str]) -> None:
+    typed_backoff = cast(
+        Callable[[C], C],
+        backoff.on_exception(backoff.expo, AssertionError, max_time=60),
+    )
+
+    @typed_backoff
+    def eventually_consistent_test() -> None:
+        subscriber.list_subscriptions_in_project(PROJECT_ID)
+        out, _ = capsys.readouterr()
+        assert subscription_admin in out
+
+    eventually_consistent_test()
+
+
+def test_create_subscription(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    topic: str,
+    capsys: CaptureFixture[str],
+) -> None:
+    subscription_for_create_name = (
f"subscription-test-subscription-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_for_create_name + ) try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + subscriber_client.delete_subscription( + request={"subscription": subscription_path} ) + except NotFound: + pass - yield subscription.name + subscriber.create_subscription(PROJECT_ID, TOPIC, subscription_for_create_name) + + out, _ = capsys.readouterr() + assert f"{subscription_for_create_name}" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + +def test_optimistic_subscribe( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + publisher_client: pubsub_v1.PublisherClient, + capsys: CaptureFixture[str], +) -> None: + subscription_id = f"subscription_for_optimistic_subscribe-{PY_VERSION}-{UUID}" + subscription_path = subscriber_client.subscription_path(PROJECT_ID, subscription_id) + # Ensure there is no pre-existing subscription. + # So that we can test the case where optimistic subscribe fails. + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + # Invoke optimistic_subscribe when the subscription is not present. + # This tests scenario where optimistic subscribe fails. + subscriber.optimistic_subscribe(PROJECT_ID, TOPIC, subscription_id, 5) + out, _ = capsys.readouterr() + # Verify optimistic subscription failed. + assert f"Subscription {subscription_path} not found, creating it." in out + # Verify that subscription created due to optimistic subscribe failure. + assert f"Subscription {subscription_path} created" in out + # Verify that subscription didn't already exist. + assert "Successfully subscribed until the timeout passed." not in out -@pytest.fixture(scope="module") -def subscription_sync(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_SYNC) + # Invoke optimistic_subscribe when the subscription is present. + # This tests scenario where optimistic subscribe succeeds. + subscriber.optimistic_subscribe(PROJECT_ID, TOPIC, subscription_id, 5) + + out, _ = capsys.readouterr() + # Verify optimistic subscription succeeded. + assert f"Subscription {subscription_path} not found, creating it." not in out + # Verify that subscription was not created due to optimistic subscribe failure. + assert f"Subscription {subscription_path} created" not in out + # Verify that subscription already existed. + assert "Successfully subscribed until the timeout passed." in out + + # Test case where optimistic subscribe throws an exception other than NotFound + # or TimeoutError. + subscriber.optimistic_subscribe(PROJECT_ID, TOPIC, "123", 5) + out, _ = capsys.readouterr() + assert "Exception occurred when attempting optimistic subscribe:" in out + + # Clean up resources created during test. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + +def test_create_subscription_with_dead_letter_policy( + subscriber_client: pubsub_v1.SubscriberClient, + dead_letter_topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_dlq_name = ( + f"subscription-test-subscription-dlq-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_name + ) try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + subscriber_client.delete_subscription( + request={"subscription": subscription_path} ) + except NotFound: + pass - yield subscription.name + subscriber.create_subscription_with_dead_letter_topic( + PROJECT_ID, TOPIC, subscription_dlq_name, DEAD_LETTER_TOPIC + ) - subscriber_client.delete_subscription(subscription.name) + out, _ = capsys.readouterr() + assert f"Subscription created: {subscription_path}" in out + assert f"It will forward dead letter messages to: {dead_letter_topic}" in out + assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." in out + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -@pytest.fixture(scope="module") -def subscription_async(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ASYNC) + +def test_receive_with_delivery_attempts( + subscriber_client: pubsub_v1.SubscriberClient, + publisher_client: pubsub_v1.PublisherClient, + topic: str, + dead_letter_topic: str, + capsys: CaptureFixture[str], +) -> None: + from google.cloud.pubsub_v1.types import DeadLetterPolicy + + subscription_dlq_for_receive_name = ( + f"subscription-test-subscription-dlq-for-receive-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_for_receive_name + ) try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} ) + except NotFound: + request = { + "name": subscription_path, + "topic": topic, + "dead_letter_policy": DeadLetterPolicy( + dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 + ), + } + subscription = subscriber_client.create_subscription(request) + + subscription_dlq = subscription.name + + _ = _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_delivery_attempts( + PROJECT_ID, subscription_dlq_for_receive_name, 90 + ) - yield subscription.name + out, _ = capsys.readouterr() + assert f"Listening for messages on {subscription_dlq}.." in out + assert "With delivery attempts: " in out - subscriber_client.delete_subscription(subscription.name) + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) -@pytest.fixture(scope="module") -def subscription_dlq(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) +def test_update_dead_letter_policy( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + dead_letter_topic: str, + capsys: CaptureFixture[str], +) -> None: + from google.cloud.pubsub_v1.types import DeadLetterPolicy + + subscription_dlq_for_update_name = ( + f"subscription-test-subscription-dlq-for-update-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_for_update_name + ) try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} ) + except NotFound: + request = { + "name": subscription_path, + "topic": topic, + "dead_letter_policy": DeadLetterPolicy( + dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 + ), + } + subscription = subscriber_client.create_subscription(request) + + subscription_dlq = subscription.name + + subscriber.update_subscription_with_dead_letter_policy( + PROJECT_ID, + TOPIC, + subscription_dlq_for_update_name, + DEAD_LETTER_TOPIC, + UPDATED_MAX_DELIVERY_ATTEMPTS, + ) - yield subscription.name + out, _ = capsys.readouterr() + assert dead_letter_topic in out + assert subscription_dlq in out + assert f"max_delivery_attempts: {UPDATED_MAX_DELIVERY_ATTEMPTS}" in out - subscriber_client.delete_subscription(subscription.name) + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_list_in_topic(subscription_admin, capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) - out, _ = capsys.readouterr() - assert subscription_admin in out +def test_remove_dead_letter_policy( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + dead_letter_topic: str, + capsys: CaptureFixture[str], +) -> None: + from google.cloud.pubsub_v1.types import DeadLetterPolicy - eventually_consistent_test() + subscription_dlq_for_remove_name = ( + f"subscription-test-subscription-dlq-for-remove-{PY_VERSION}-{UUID}" + ) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_for_remove_name + ) -def test_list_in_project(subscription_admin, capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - subscriber.list_subscriptions_in_project(PROJECT) - out, _ = capsys.readouterr() - assert subscription_admin in out + request = { + "name": subscription_path, + "topic": topic, + "dead_letter_policy": DeadLetterPolicy( + dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 + ), + } + subscription = subscriber_client.create_subscription(request) - eventually_consistent_test() + subscription_dlq = subscription.name + + subscription_after_update = subscriber.remove_dead_letter_policy( + PROJECT_ID, TOPIC, subscription_dlq_for_remove_name + ) + + out, _ = capsys.readouterr() + assert subscription_dlq in out + assert subscription_after_update.dead_letter_policy.dead_letter_topic == "" + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_create(subscriber_client): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) +def test_create_subscription_with_ordering( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_with_ordering_name = ( + f"subscription-test-subscription-with-ordering-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_with_ordering_name + ) try: - subscriber_client.delete_subscription(subscription_path) - except Exception: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: pass - subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN) + subscriber.create_subscription_with_ordering( + PROJECT_ID, TOPIC, subscription_with_ordering_name + ) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) + out, _ = capsys.readouterr() + assert "Created subscription with ordering" in out + assert f"{subscription_with_ordering_name}" in out + assert "enable_message_ordering: true" in out - eventually_consistent_test() + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_create_subscription_with_dead_letter_policy( - subscriber_client, publisher_client, topic, dead_letter_topic, capsys -): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) - dead_letter_topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) +def test_create_subscription_with_filtering( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_with_filtering_name = ( + f"subscription-test-subscription-with-filtering-{PY_VERSION}-{UUID}" + ) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_with_filtering_name + ) try: - subscriber_client.delete_subscription(subscription_path) - except Exception: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: pass - subscriber.create_subscription_with_dead_letter_topic( - PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + subscriber.create_subscription_with_filtering( + PROJECT_ID, TOPIC, subscription_with_filtering_name, FILTER + ) + + out, _ = capsys.readouterr() + assert "Created subscription with filtering enabled" in out + assert f"{subscription_with_filtering_name}" in out + assert '"attributes.author=\\"unknown\\""' in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + +def test_create_subscription_with_exactly_once_delivery( + subscriber_client: pubsub_v1.SubscriberClient, + exactly_once_delivery_topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_eod_for_create_name = ( + f"subscription-test-subscription-eod-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_eod_for_create_name + ) + + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_subscription_with_exactly_once_delivery( + PROJECT_ID, EOD_TOPIC, subscription_eod_for_create_name ) out, _ = capsys.readouterr() - assert "Subscription created: " + subscription_path in out - assert "It will forward dead letter messages to: " + dead_letter_topic_path in out - assert "After 10 delivery attempts." in out + assert "Created subscription with exactly once delivery enabled" in out + assert f"{subscription_eod_for_create_name}" in out + assert "enable_exactly_once_delivery: true" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_create_push(subscriber_client): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) +def test_create_push_subscription( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + push_subscription_for_create_name = ( + f"subscription-test-subscription-push-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, push_subscription_for_create_name + ) try: - subscriber_client.delete_subscription(subscription_path) - except Exception: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: pass - subscriber.create_push_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) + subscriber.create_push_subscription( + PROJECT_ID, TOPIC, push_subscription_for_create_name, ENDPOINT + ) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) + out, _ = capsys.readouterr() + assert f"{push_subscription_for_create_name}" in out - eventually_consistent_test() + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_update(subscriber_client, subscription_admin, capsys): +def test_create_subscription_with_smt( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_for_create_name = ( + f"subscription-test-subscription-for-create-with-smt-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_for_create_name + ) + + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_subscription_with_smt( + PROJECT_ID, TOPIC, subscription_for_create_name + ) + + out, _ = capsys.readouterr() + assert f"{subscription_for_create_name}" in out + + # Clean up. 
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
+
+
+def test_update_push_subscription(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    topic: str,
+    capsys: CaptureFixture[str],
+) -> None:
+    push_subscription_for_update_name = (
+        f"subscription-test-subscription-push-for-update-{PY_VERSION}-{UUID}"
+    )
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, push_subscription_for_update_name
+    )
+
+    try:
+        subscriber_client.get_subscription(request={"subscription": subscription_path})
+    except NotFound:
+        subscriber_client.create_subscription(
+            request={"name": subscription_path, "topic": topic}
+        )
+
     subscriber.update_push_subscription(
-        PROJECT, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT
+        PROJECT_ID, TOPIC, push_subscription_for_update_name, NEW_ENDPOINT
     )
 
     out, _ = capsys.readouterr()
     assert "Subscription updated" in out
+    assert f"{push_subscription_for_update_name}" in out
 
+    # Clean up.
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
 
-def test_update_dead_letter_policy(
-    subscriber_client, topic, subscription_dlq, dead_letter_topic, capsys
-):
-    _ = subscriber.update_subscription_with_dead_letter_policy(
-        PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC
+
+def test_create_push_no_wrapper_subscription(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    topic: str,
+    capsys: CaptureFixture[str],
+) -> None:
+    push_subscription_for_create_name = (
+        f"subscription-test-subscription-push-no-wrapper-for-create-{PY_VERSION}-{UUID}"
+    )
+
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, push_subscription_for_create_name
+    )
+    try:
+        subscriber_client.delete_subscription(
+            request={"subscription": subscription_path}
+        )
+    except NotFound:
+        pass
+
+    subscriber.create_push_no_wrapper_subscription(
+        PROJECT_ID, TOPIC, push_subscription_for_create_name, ENDPOINT
     )
 
     out, _ = capsys.readouterr()
-    assert "max_delivery_attempts: 20" in out
+    assert f"{push_subscription_for_create_name}" in out
+
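In the underlying API, the update path exercised above reduces to an `update_subscription` call carrying a new `PushConfig` and a field mask. A minimal sketch, not part of the patch, assuming placeholder names `project_id`, `subscription_id`, and `new_endpoint`:

```python
# Sketch only: update a subscription's push endpoint in place.
from google.cloud import pubsub_v1
from google.protobuf import field_mask_pb2


def update_push_endpoint_sketch(
    project_id: str, subscription_id: str, new_endpoint: str
) -> None:
    subscriber_client = pubsub_v1.SubscriberClient()
    subscription_path = subscriber_client.subscription_path(project_id, subscription_id)

    subscription = pubsub_v1.types.Subscription(
        name=subscription_path,
        push_config=pubsub_v1.types.PushConfig(push_endpoint=new_endpoint),
    )
    # The mask restricts the update to push_config; other fields are untouched.
    update_mask = field_mask_pb2.FieldMask(paths=["push_config"])

    with subscriber_client:
        result = subscriber_client.update_subscription(
            request={"subscription": subscription, "update_mask": update_mask}
        )
    print(f"Subscription updated: {result}")
```

+    # Clean up.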
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_delete(subscriber_client, subscription_admin): - subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ADMIN) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - with pytest.raises(Exception): - subscriber_client.get_subscription(subscription_admin) +@pytest.fixture(scope="module") +def bigquery_table() -> Generator[str, None, None]: + client = bigquery.Client() + dataset = bigquery.Dataset(f"{PROJECT_ID}.{BIGQUERY_DATASET_ID}") + dataset.location = "US" + dataset = client.create_dataset(dataset) + + table_id = f"{PROJECT_ID}.{BIGQUERY_DATASET_ID}.{BIGQUERY_TABLE_ID}" + schema = [ + bigquery.SchemaField("data", "STRING", mode="REQUIRED"), + bigquery.SchemaField("message_id", "STRING", mode="REQUIRED"), + bigquery.SchemaField("attributes", "STRING", mode="REQUIRED"), + bigquery.SchemaField("subscription_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("publish_time", "TIMESTAMP", mode="REQUIRED"), + ] + + table = bigquery.Table(table_id, schema=schema) + table = client.create_table(table) + + yield table_id + + client.delete_dataset(dataset, delete_contents=True) + + +def test_create_bigquery_subscription( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + bigquery_table: str, + capsys: CaptureFixture[str], +) -> None: + bigquery_subscription_for_create_name = ( + f"subscription-test-subscription-bigquery-for-create-{PY_VERSION}-{UUID}" + ) - eventually_consistent_test() + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, bigquery_subscription_for_create_name + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_bigquery_subscription( + PROJECT_ID, TOPIC, bigquery_subscription_for_create_name, bigquery_table + ) + + out, _ = capsys.readouterr() + assert f"{bigquery_subscription_for_create_name}" in out + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def _publish_messages(publisher_client, topic): - for n in range(5): - data = u"message {}".format(n).encode("utf-8") - publish_future = publisher_client.publish( - topic, data=data, origin="python-sample" + +@pytest.fixture(scope="module") +def cloudstorage_bucket() -> Generator[str, None, None]: + storage_client = storage.Client() + + bucket_name = CLOUDSTORAGE_BUCKET + + bucket = storage_client.create_bucket(bucket_name) + print(f"Bucket {bucket.name} created.") + + yield bucket.name + + bucket.delete() + + +def test_create_cloudstorage_subscription( + subscriber_client: pubsub_v1.SubscriberClient, + unused_topic: str, + cloudstorage_bucket: str, + capsys: CaptureFixture[str], +) -> None: + cloudstorage_subscription_for_create_name = ( + f"subscription-test-subscription-cloudstorage-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, cloudstorage_subscription_for_create_name + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_cloudstorage_subscription( + PROJECT_ID, + # We have to use a topic with no messages published, + # so that the bucket will be empty and can be deleted. 
+        UNUSED_TOPIC,
+        cloudstorage_subscription_for_create_name,
+        cloudstorage_bucket,
+    )
+
+    out, _ = capsys.readouterr()
+    assert f"{cloudstorage_subscription_for_create_name}" in out
+
+    # Clean up.
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
+
+
+def test_delete_subscription(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    topic: str,
+) -> None:
+    subscription_for_delete_name = (
+        f"subscription-test-subscription-for-delete-{PY_VERSION}-{UUID}"
+    )
+
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, subscription_for_delete_name
+    )
+
+    try:
+        subscriber_client.get_subscription(request={"subscription": subscription_path})
+    except NotFound:
+        subscriber_client.create_subscription(
+            request={"name": subscription_path, "topic": topic}
         )
-        publish_future.result()
+
+    subscriber.delete_subscription(PROJECT_ID, subscription_for_delete_name)
 
-def test_receive(publisher_client, topic, subscription_async, capsys):
-    _publish_messages(publisher_client, topic)
+    with pytest.raises(Exception):
+        subscriber_client.get_subscription(
+            request={"subscription": subscription_path}
+        )
 
-    subscriber.receive_messages(PROJECT, SUBSCRIPTION_ASYNC, 5)
+    # No clean up required.
+
+
+def test_receive(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    topic: str,
+    publisher_client: pubsub_v1.PublisherClient,
+    capsys: CaptureFixture[str],
+) -> None:
+    subscription_async_for_receive_name = (
+        f"subscription-test-subscription-async-for-receive-{PY_VERSION}-{UUID}"
+    )
+
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, subscription_async_for_receive_name
+    )
+
+    try:
+        subscriber_client.get_subscription(request={"subscription": subscription_path})
+    except NotFound:
+        subscriber_client.create_subscription(
+            request={"name": subscription_path, "topic": topic}
+        )
+
+    _ = _publish_messages(publisher_client, topic)
+
+    subscriber.receive_messages(PROJECT_ID, subscription_async_for_receive_name, 5)
 
     out, _ = capsys.readouterr()
     assert "Listening" in out
-    assert subscription_async in out
+    assert subscription_async_for_receive_name in out
     assert "message" in out
 
+    # Clean up.
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
+
 
 def test_receive_with_custom_attributes(
-    publisher_client, topic, subscription_async, capsys
-):
+    subscriber_client: pubsub_v1.SubscriberClient,
+    publisher_client: pubsub_v1.PublisherClient,
+    topic: str,
+    capsys: CaptureFixture[str],
+) -> None:
+    subscription_async_receive_with_custom_name = (
+        f"subscription-test-subscription-async-receive-with-custom-{PY_VERSION}-{UUID}"
+    )
+
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, subscription_async_receive_with_custom_name
+    )
+
+    try:
+        subscriber_client.get_subscription(request={"subscription": subscription_path})
+    except NotFound:
+        subscriber_client.create_subscription(
+            request={"name": subscription_path, "topic": topic}
+        )
 
-    _publish_messages(publisher_client, topic)
+    _ = _publish_messages(publisher_client, topic, origin="python-sample")
 
-    subscriber.receive_messages_with_custom_attributes(PROJECT, SUBSCRIPTION_ASYNC, 5)
+    subscriber.receive_messages_with_custom_attributes(
+        PROJECT_ID, subscription_async_receive_with_custom_name, 5
+    )
 
     out, _ = capsys.readouterr()
+    assert subscription_async_receive_with_custom_name in out
     assert "message" in out
     assert "origin" in out
     assert "python-sample" in out
+
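The receive tests in this file share one shape: open a streaming pull with a callback, then bound it with a timeout. A minimal sketch of that shape, not part of the patch; `project_id` and `subscription_id` are placeholders:

```python
# Sketch only: bounded streaming pull with a callback.
from concurrent.futures import TimeoutError

from google.cloud import pubsub_v1
from google.cloud.pubsub_v1.subscriber.message import Message


def receive_messages_sketch(
    project_id: str, subscription_id: str, timeout: float = 5.0
) -> None:
    subscriber_client = pubsub_v1.SubscriberClient()
    subscription_path = subscriber_client.subscription_path(project_id, subscription_id)

    def callback(message: Message) -> None:
        print(f"Received {message.data!r}.")
        message.ack()

    streaming_pull_future = subscriber_client.subscribe(
        subscription_path, callback=callback
    )
    print(f"Listening for messages on {subscription_path}..\n")

    with subscriber_client:
        try:
            # Blocks until the timeout elapses; also surfaces callback errors.
            streaming_pull_future.result(timeout=timeout)
        except TimeoutError:
            streaming_pull_future.cancel()  # Trigger the shutdown.
            streaming_pull_future.result()  # Block until the shutdown completes.
```

+    # Clean up.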
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_receive_with_flow_control(publisher_client, topic, subscription_async, capsys): - _publish_messages(publisher_client, topic) +def test_receive_with_flow_control( + subscriber_client: pubsub_v1.SubscriberClient, + publisher_client: pubsub_v1.PublisherClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_async_receive_with_flow_control_name = f"subscription-test-subscription-async-receive-with-flow-control-{PY_VERSION}-{UUID}" - subscriber.receive_messages_with_flow_control(PROJECT, SUBSCRIPTION_ASYNC, 5) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_receive_with_flow_control_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + + _ = _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_flow_control( + PROJECT_ID, subscription_async_receive_with_flow_control_name, 5 + ) out, _ = capsys.readouterr() assert "Listening" in out - assert subscription_async in out + assert subscription_async_receive_with_flow_control_name in out assert "message" in out + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + +def test_receive_with_blocking_shutdown( + subscriber_client: pubsub_v1.SubscriberClient, + publisher_client: pubsub_v1.PublisherClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_async_receive_with_blocking_name = f"subscription-test-subscription-async-receive-with-blocking-shutdown-{PY_VERSION}-{UUID}" + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_receive_with_blocking_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) -def test_receive_synchronously(publisher_client, topic, subscription_sync, capsys): - _publish_messages(publisher_client, topic) + _received = re.compile(r".*received.*message.*", flags=re.IGNORECASE) + _done = re.compile(r".*done processing.*message.*", flags=re.IGNORECASE) + _canceled = re.compile(r".*streaming pull future canceled.*", flags=re.IGNORECASE) + _shut_down = re.compile(r".*done waiting.*stream shutdown.*", flags=re.IGNORECASE) - subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC) + _ = _publish_messages(publisher_client, topic, message_num=3) + + subscriber.receive_messages_with_blocking_shutdown( + PROJECT_ID, subscription_async_receive_with_blocking_name, timeout=5.0 + ) out, _ = capsys.readouterr() - assert "Done." 
in out
+    out_lines = out.splitlines()
+
+    msg_received_lines = [
+        i for i, line in enumerate(out_lines) if _received.search(line)
+    ]
+    msg_done_lines = [i for i, line in enumerate(out_lines) if _done.search(line)]
+    stream_canceled_lines = [
+        i for i, line in enumerate(out_lines) if _canceled.search(line)
+    ]
+    shutdown_done_waiting_lines = [
+        i for i, line in enumerate(out_lines) if _shut_down.search(line)
+    ]
+    try:
+        assert "Listening" in out
+        assert subscription_async_receive_with_blocking_name in out
+
+        assert len(stream_canceled_lines) == 1
+        assert len(shutdown_done_waiting_lines) == 1
+        assert len(msg_received_lines) == 3
+        assert len(msg_done_lines) == 3
+
+        # The stream should have been canceled *after* receiving messages, but before
+        # message processing was done.
+        assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0]
+
+        # Yet, waiting on the stream shutdown should have completed *after*
+        # the processing of received messages had ended.
+        assert msg_done_lines[-1] < shutdown_done_waiting_lines[0]
+    except AssertionError:  # pragma: NO COVER
+        from pprint import pprint
+
+        pprint(out_lines)  # To make debugging of possible flakiness easier.
+        raise
+
+    # Clean up.
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
+
+
+def test_receive_messages_with_exactly_once_delivery_enabled(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    regional_publisher_client: pubsub_v1.PublisherClient,
+    exactly_once_delivery_topic: str,
+    capsys: CaptureFixture[str],
+) -> None:
+    subscription_eod_for_receive_name = (
+        f"subscription-test-subscription-eod-for-receive-{PY_VERSION}-{UUID}"
+    )
 
-def test_receive_synchronously_with_lease(
-    publisher_client, topic, subscription_sync, capsys
-):
-    _publish_messages(publisher_client, topic)
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, subscription_eod_for_receive_name
+    )
 
-    subscriber.synchronous_pull_with_lease_management(PROJECT, SUBSCRIPTION_SYNC)
+    try:
+        subscriber_client.get_subscription(request={"subscription": subscription_path})
+    except NotFound:
+        subscriber_client.create_subscription(
+            request={
+                "name": subscription_path,
+                "topic": exactly_once_delivery_topic,
+                "enable_exactly_once_delivery": True,
+            }
+        )
+
+    message_ids = _publish_messages(
+        regional_publisher_client, exactly_once_delivery_topic
+    )
+
+    subscriber.receive_messages_with_exactly_once_delivery_enabled(
+        PROJECT_ID, subscription_eod_for_receive_name, 200
+    )
 
     out, _ = capsys.readouterr()
-    assert "Done." in out
+    assert subscription_eod_for_receive_name in out
+    for message_id in message_ids:
+        assert message_id in out
+
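With `enable_exactly_once_delivery` set, acks can be awaited: `ack_with_response()` returns a future that resolves once the ack is durably recorded, which is what lets the sample print the message IDs the test asserts on. A minimal sketch, not part of the patch, with placeholder names:

```python
# Sketch only: acking with response on an exactly-once-delivery subscription.
from concurrent.futures import TimeoutError

from google.cloud import pubsub_v1
from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions
from google.cloud.pubsub_v1.subscriber.message import Message


def receive_with_exactly_once_sketch(
    project_id: str, subscription_id: str, timeout: float = 5.0
) -> None:
    subscriber_client = pubsub_v1.SubscriberClient()
    subscription_path = subscriber_client.subscription_path(project_id, subscription_id)

    def callback(message: Message) -> None:
        # The returned future resolves once the ack is durably recorded.
        ack_future = message.ack_with_response()
        try:
            ack_future.result()
            print(f"Ack for message {message.message_id} successful.")
        except sub_exceptions.AcknowledgeError as e:
            print(
                f"Ack for message {message.message_id} failed with error: "
                f"{e.error_code}"
            )

    streaming_pull_future = subscriber_client.subscribe(
        subscription_path, callback=callback
    )

    with subscriber_client:
        try:
            streaming_pull_future.result(timeout=timeout)
        except TimeoutError:
            streaming_pull_future.cancel()
            streaming_pull_future.result()
```

+    # Clean up.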
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + +def test_listen_for_errors( + subscriber_client: pubsub_v1.SubscriberClient, + publisher_client: pubsub_v1.PublisherClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_async_listen = ( + f"subscription-test-subscription-async-listen-{PY_VERSION}-{UUID}" + ) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_listen + ) -def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) - _publish_messages(publisher_client, topic) + _ = _publish_messages(publisher_client, topic) - subscriber.listen_for_errors(PROJECT, SUBSCRIPTION_ASYNC, 5) + subscriber.listen_for_errors(PROJECT_ID, subscription_async_listen, 5) out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_async in out + assert subscription_path in out assert "threw an exception" in out + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_receive_with_delivery_attempts( - publisher_client, topic, subscription_dlq, dead_letter_topic, capsys -): - _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_delivery_attempts(PROJECT, SUBSCRIPTION_DLQ, 10) +def test_receive_synchronously( + subscriber_client: pubsub_v1.SubscriberClient, + publisher_client: pubsub_v1.PublisherClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_sync_for_receive_name = ( + f"subscription-test-subscription-sync-for-receive-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_sync_for_receive_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + + _ = _publish_messages(publisher_client, topic) + + subscriber.synchronous_pull(PROJECT_ID, subscription_sync_for_receive_name) + + out, _ = capsys.readouterr() + + assert "Received" in out + assert f"{subscription_sync_for_receive_name}" in out + + # Clean up. 
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
+
+
+def test_receive_messages_with_concurrency_control(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    publisher_client: pubsub_v1.PublisherClient,
+    topic: str,
+    capsys: CaptureFixture[str],
+) -> None:
+    subscription_async_receive_messages_with_concurrency_control_name = f"subscription-test-subscription-async-receive-messages-with-concurrency-control-{PY_VERSION}-{UUID}"
+
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, subscription_async_receive_messages_with_concurrency_control_name
+    )
+
+    try:
+        subscriber_client.get_subscription(request={"subscription": subscription_path})
+    except NotFound:
+        subscriber_client.create_subscription(
+            request={"name": subscription_path, "topic": topic}
+        )
+
+    _ = _publish_messages(publisher_client, topic)
+
+    subscriber.receive_messages_with_flow_control(
+        PROJECT_ID, subscription_async_receive_messages_with_concurrency_control_name, 5
+    )
 
     out, _ = capsys.readouterr()
     assert "Listening" in out
-    assert subscription_dlq in out
-    assert "Received message: " in out
-    assert "message 4" in out
-    assert "With delivery attempts: " in out
+    assert subscription_async_receive_messages_with_concurrency_control_name in out
+    assert "message" in out
 
+    # Clean up.
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
 
-def test_remove_dead_letter_policy(subscriber_client, subscription_dlq):
-    subscription_after_update = subscriber.remove_dead_letter_policy(
-        PROJECT, TOPIC, SUBSCRIPTION_DLQ
+
+@typed_flaky
+def test_receive_synchronously_with_lease(
+    subscriber_client: pubsub_v1.SubscriberClient,
+    publisher_client: pubsub_v1.PublisherClient,
+    topic: str,
+    capsys: CaptureFixture[str],
+) -> None:
+    subscription_sync_for_receive_with_lease_name = f"subscription-test-subscription-sync-for-receive-with-lease-{PY_VERSION}-{UUID}"
+
+    subscription_path = subscriber_client.subscription_path(
+        PROJECT_ID, subscription_sync_for_receive_with_lease_name
     )
-    assert subscription_after_update.dead_letter_policy.dead_letter_topic == ""
+    try:
+        subscriber_client.get_subscription(request={"subscription": subscription_path})
+    except NotFound:
+        subscriber_client.create_subscription(
+            request={"name": subscription_path, "topic": topic}
+        )
+
+    _ = _publish_messages(publisher_client, topic, message_num=10)
+    # Pause 10s to allow the subscriber to establish the connection, because
+    # sync pull often returns fewer messages than requested.
+    # The intention is to fix flaky tests reporting errors like
+    # `google.api_core.exceptions.Unknown: None Stream removed` as
+    # in https://github.com/googleapis/python-pubsub/issues/341.
+    time.sleep(10)
+    subscriber.synchronous_pull_with_lease_management(
+        PROJECT_ID, subscription_sync_for_receive_with_lease_name
+    )
+
+    out, _ = capsys.readouterr()
+
+    # Sometimes the subscriber only receives one or two messages instead of
+    # ten; we consider those cases as passing, hence the loose assertions below.
+    assert "Received and acknowledged" in out
+    assert f"messages from {subscription_path}." in out
+
+    # Clean up.
+    subscriber_client.delete_subscription(request={"subscription": subscription_path})
diff --git a/samples/snippets/utilities/us_states_pb2.py b/samples/snippets/utilities/us_states_pb2.py
new file mode 100644
index 000000000..93af674bd
--- /dev/null
+++ b/samples/snippets/utilities/us_states_pb2.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: us-states.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fus-states.proto\x12\tutilities\"-\n\nStateProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpost_abbr\x18\x02 \x01(\tb\x06proto3') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'us_states_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _STATEPROTO._serialized_start=30 + _STATEPROTO._serialized_end=75 +# @@protoc_insertion_point(module_scope) diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index ff599eb2a..120b0ddc4 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e97..8f5e248a0 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -28,19 +28,22 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) -README_TMPL = jinja_env.get_template('README.tmpl.rst') +README_TMPL = jinja_env.get_template("README.tmpl.rst") def get_help(file): - return subprocess.check_output(['python', file, '--help']).decode() + return subprocess.check_output(["python", file, "--help"]).decode() def main(): parser = argparse.ArgumentParser() - parser.add_argument('source') - parser.add_argument('--destination', default='README.rst') + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") args = parser.parse_args() @@ -48,9 +51,9 @@ def main(): root = os.path.dirname(source) destination = os.path.join(root, args.destination) - jinja_env.globals['get_help'] = get_help + jinja_env.globals["get_help"] = get_help - with io.open(source, 'r') as f: + with io.open(source, "r") as f: config = yaml.load(f) # This allows get_help to execute in the right directory. @@ -58,9 +61,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, 'w') as f: + with io.open(destination, "w") as f: f.write(output) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406dba8..6f069c6c8 100644 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash diff --git a/setup.py b/setup.py index 528bb66a2..74a11ebf6 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,35 +12,52 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# + +# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` +# The source of truth for this file is `.librarian/generator-input` import io import os -import setuptools +import setuptools # type: ignore - -# Package metadata. +package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-pubsub" + + description = "Google Cloud Pub/Sub API client library" -version = "1.6.1" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 5 - Production/Stable" -dependencies = [ - # google-api-core[grpc] 1.17.0 up to 1.19.1 causes problems with stream - # recovery, thus those versions should not be used. 
- # https://github.com/googleapis/python-pubsub/issues/74 - "google-api-core[grpc] >= 1.14.0, != 1.17.*, != 1.18.*, != 1.19.*", - "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - 'enum34; python_version < "3.4"', -] -extras = {} +version = {} +with open(os.path.join(package_root, "google/pubsub/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" -# Setup boilerplate below this line. +dependencies = [ + "grpcio >= 1.51.3, < 2.0.0; python_version < '3.14'", # https://github.com/googleapis/python-pubsub/issues/609 + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + # google-api-core >= 1.34.0 is allowed in order to support google-api-core 1.x + "google-auth >= 2.14.1, <3.0.0", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, < 1.0.0", + "grpcio-status >= 1.33.2", + "opentelemetry-api <= 1.22.0; python_version<='3.7'", + "opentelemetry-api >= 1.27.0; python_version>='3.8'", + "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", + "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", +] +extras = {"libcst": "libcst >= 0.3.10"} +url = "https://github.com/googleapis/python-pubsub" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -47,18 +65,12 @@ with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") ] -# Determine which namespaces are needed. 
-namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - setuptools.setup( name=name, version=version, @@ -67,27 +79,29 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/googleapis/python-pubsub", + url=url, classifiers=[ release_status, "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], platforms="Posix; MacOS X; Windows", packages=packages, - namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*", + python_requires=">=3.7", include_package_data=True, zip_safe=False, ) diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index f67fbeec5..000000000 --- a/synth.metadata +++ /dev/null @@ -1,51 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "c8f63788636c2e3436c8ce6a01ef3b59e3df772a" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b882b8e6bfcd708042ff00f7adc67ce750817dd0", - "internalRef": "318028816" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "pubsub", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ] -} \ No newline at end of file diff --git a/synth.py b/synth.py deleted file mode 100644 index 0e2c96e42..000000000 --- a/synth.py +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This script is used to synthesize generated parts of this library.""" - -import textwrap - -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -gapic = gcp.GAPICBazel() -common = gcp.CommonTemplates() -version = "v1" - -# ---------------------------------------------------------------------------- -# Generate pubsub GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="pubsub", - version=version, - bazel_target="//google/pubsub/v1:pubsub-v1-py", - include_protos=True, -) -s.move( - library, - excludes=[ - "docs/**/*", - "nox.py", - "README.rst", - "setup.py", - "google/cloud/pubsub_v1/__init__.py", - "google/cloud/pubsub_v1/types.py", - ], -) - -# Adjust tests to import the clients directly. -s.replace( - "tests/unit/gapic/v1/test_publisher_client_v1.py", - "from google.cloud import pubsub_v1", - "from google.cloud.pubsub_v1.gapic import publisher_client", -) - -s.replace( - "tests/unit/gapic/v1/test_publisher_client_v1.py", " pubsub_v1", " publisher_client" -) - -s.replace( - "tests/unit/gapic/v1/test_subscriber_client_v1.py", - "from google.cloud import pubsub_v1", - "from google.cloud.pubsub_v1.gapic import subscriber_client", -) - -s.replace( - "tests/unit/gapic/v1/test_subscriber_client_v1.py", - " pubsub_v1", - " subscriber_client", -) - -# DEFAULT SCOPES are being used. so let's force them in. -s.replace( - "google/cloud/pubsub_v1/gapic/*er_client.py", - "# The name of the interface for this client. This is the key used to", - """# The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', ) - - \g<0>""", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import google.api_core.gapic_v1.method\n", - "\g<0>import google.api_core.path_template\n", -) - -# Doc strings are formatted poorly -s.replace( - "google/cloud/pubsub_v1/proto/pubsub_pb2.py", - 'DESCRIPTOR = _MESSAGESTORAGEPOLICY,\n\s+__module__.*\n\s+,\n\s+__doc__ = """', - "\g<0>A message storage policy.\n\n\n ", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - "subscription \(str\): The subscription whose backlog .*\n(.*\n)+?" - "\s+Format is .*", - """subscription (str): The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: \\ - (a) The existing backlog on the subscription. More precisely, this is \\ - defined as the messages in the subscription's backlog that are \\ - unacknowledged upon the successful completion of the \\ - `CreateSnapshot` request; as well as: \\ - (b) Any messages published to the subscription's topic following the \\ - successful completion of the CreateSnapshot request. 
\\ - - Format is ``projects/{project}/subscriptions/{sub}``.""", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import functools\n", - "import collections\n" - "from copy import deepcopy\n\g<0>" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import pkg_resources\n", - "\g<0>import six\n" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "class PublisherClient", - """# TODO: remove conditional import after Python 2 support is dropped -if six.PY2: - from collections import Mapping -else: - from collections.abc import Mapping - - -def _merge_dict(d1, d2): - # Modifies d1 in-place to take values from d2 - # if the nested keys from d2 are present in d1. - # https://stackoverflow.com/a/10704003/4488789 - for k, v2 in d2.items(): - v1 = d1.get(k) # returns None if v1 has no such key - if v1 is None: - raise Exception("{} is not recognized by client_config".format(k)) - if isinstance(v1, Mapping) and isinstance(v2, Mapping): - _merge_dict(v1, v2) - else: - d1[k] = v2 - return d1 - \n\n\g<0>""" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "client_config \(dict\): DEPRECATED.", - "client_config (dict):" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "# Raise deprecation warnings .*\n.*\n.*\n.*\n.*\n.*\n", - """default_client_config = deepcopy(publisher_client_config.config) - - if client_config is None: - client_config = default_client_config - else: - client_config = _merge_dict(default_client_config, client_config) - """ -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "~google.api_core.page_iterator.PageIterator", - "~google.api_core.page_iterator.GRPCIterator" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - "~google.api_core.page_iterator.PageIterator", - "~google.api_core.page_iterator.GRPCIterator" -) - -# Temporary fixup for 'grpc-google-iam-vi 0.12.4' (before generation). -s.replace( - "google/cloud/pubsub_v1/gapic/transports/*_grpc_transport.py", - "from google.iam.v1 import iam_policy_pb2", - "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", -) - -# Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream -# results. -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"return self\._inner_api_calls\['streaming_pull'\]\(.*", - """ - # Wrappers in api-core should not automatically pre-fetch the first - # stream result, as this breaks the stream when re-opening it. - # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self.transport.streaming_pull._prefetch_first_result_ = False - - \g<0>""" -) - -# Add missing blank line before Attributes: in generated docstrings -# https://github.com/googleapis/protoc-docs-plugin/pull/31 -s.replace( - "google/cloud/pubsub_v1/proto/pubsub_pb2.py", - "(\s+)Attributes:", - "\n\g<1>Attributes:" -) - -# Fix incomplete docstring examples. -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"\s+>>> subscription = \{'ack_deadline_seconds': ack_deadline_seconds\}", - textwrap.indent( - """ ->>> subscription_name = 'projects/my-project/subscriptions/my-subscription' ->>> subscription = { -... 'name': subscription_name, -... 'ack_deadline_seconds': ack_deadline_seconds, -... 
}""", - prefix=" " * 12, - ) -) - -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"\s+>>> snapshot = \{'expire_time': expire_time\}", - textwrap.indent( - """ ->>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' ->>> snapshot = { -... 'name': snapshot_name, -... 'expire_time': expire_time, -... }""", - prefix=" " * 12, - ) -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - r"\s+>>> # TODO: Initialize `topic`:\n\s+>>> topic = \{\}\n", - textwrap.indent( - """ ->>> topic_name = 'projects/my-project/topics/my-topic' ->>> topic_labels = {'source': 'external'} ->>> topic = {'name': topic_name, 'labels': topic_labels} -""", - prefix=" " * 12, - ), -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - r"\s+>>> # TODO: Initialize `update_mask`:\n\s+>>> update_mask = \{\}\n", - textwrap.indent( - """ ->>> paths_element = 'labels' ->>> paths = [paths_element] ->>> update_mask = {'paths': paths} -""", - prefix=" " * 12, - ), -) - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = gcp.CommonTemplates().py_library( - unit_cov_level=97, - cov_level=99, - system_test_external_dependencies=["psutil"], - samples=True, -) -s.move(templated_files) - -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- -python.py_samples() - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt new file mode 100644 index 000000000..ef1c92fff --- /dev/null +++ b/testing/constraints-3.10.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 000000000..ef1c92fff --- /dev/null +++ b/testing/constraints-3.11.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt new file mode 100644 index 000000000..ef1c92fff --- /dev/null +++ b/testing/constraints-3.12.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt new file mode 100644 index 000000000..2ae5a677e --- /dev/null +++ b/testing/constraints-3.13.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/testing/constraints-3.14.txt b/testing/constraints-3.14.txt new file mode 100644 index 000000000..2ae5a677e --- /dev/null +++ b/testing/constraints-3.14.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 000000000..08db5de87 --- /dev/null +++ b/testing/constraints-3.7.txt @@ -0,0 +1,13 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +google-auth==2.14.1 +proto-plus==1.22.0 +protobuf==3.20.2 +grpc-google-iam-v1==0.12.4 +grpcio==1.51.3 +grpcio-status==1.33.2 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 000000000..30520e2d0 --- /dev/null +++ b/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core==1.34.0 +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 000000000..ef1c92fff --- /dev/null +++ b/testing/constraints-3.9.txt @@ -0,0 +1,9 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..cbf94b283 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/system.py b/tests/system.py index 1694c5022..e1af74402 100644 --- a/tests/system.py +++ b/tests/system.py @@ -14,17 +14,25 @@ from __future__ import absolute_import +import concurrent.futures import datetime import itertools import operator as op import os import psutil +import sys import threading import time +from typing import Any, Callable, cast, TypeVar -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + +from flaky import flaky import pytest -import six import google.auth from google.api_core import exceptions as core_exceptions @@ -32,35 +40,39 @@ from google.cloud.pubsub_v1 import exceptions from google.cloud.pubsub_v1 import futures from google.cloud.pubsub_v1 import types +from google.pubsub_v1 import types as gapic_types from test_utils.system import unique_resource_id +C = TypeVar("C", bound=Callable[..., Any]) +typed_flaky = cast(Callable[[C], C], flaky(max_runs=5, min_passes=1)) + -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def project(): _, default_project = google.auth.default() yield default_project -@pytest.fixture(scope=u"module") -def publisher(): - yield pubsub_v1.PublisherClient() +@pytest.fixture(params=["grpc", "rest"]) +def publisher(request): + yield pubsub_v1.PublisherClient(transport=request.param) -@pytest.fixture(scope="module") -def subscriber(): - yield pubsub_v1.SubscriberClient() +@pytest.fixture(params=["grpc", "rest"]) +def subscriber(request): + yield pubsub_v1.SubscriberClient(transport=request.param) @pytest.fixture -def topic_path(project, publisher): +def topic_path_base(project, publisher): topic_name = "t" + unique_resource_id("-") yield publisher.topic_path(project, topic_name) @pytest.fixture -def subscription_path(project, subscriber): +def subscription_path_base(project, subscriber): sub_name = "s" + unique_resource_id("-") yield subscriber.subscription_path(project, sub_name) @@ -71,31 +83,39 @@ def cleanup(): yield registry # Perform all clean up. - for to_call, argument in registry: - to_call(argument) + for to_call, args, kwargs in registry: + try: + to_call(*args, **kwargs) + except core_exceptions.NotFound: + pass -def test_publish_messages(publisher, topic_path, cleanup): +def test_publish_messages(publisher, topic_path_base, cleanup): + # Customize topic path to test. + topic_path = topic_path_base + "-publish-messages" # Make sure the topic gets deleted. - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) futures = [ publisher.publish( topic_path, b"The hail in Wales falls mainly on the snails.", num=str(i) ) - for i in six.moves.range(500) + for i in range(500) ] for future in futures: result = future.result() - assert isinstance(result, six.string_types) + assert isinstance(result, str) -def test_publish_large_messages(publisher, topic_path, cleanup): +def test_publish_large_messages(topic_path_base, cleanup): + publisher = pubsub_v1.PublisherClient(transport="grpc") + # Customize topic path to test. + topic_path = topic_path_base + "-publish-large-messages" # Make sure the topic gets deleted. 
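The reworked `cleanup` fixture above now stores `(callable, args, kwargs)` triples and swallows `NotFound` during teardown, so a test can register a deletion before the resource even exists. A condensed usage sketch (hypothetical topic name):

```python
# Sketch of the new cleanup contract: register the deletion first, then create
# the resource; teardown tolerates resources that were never created.
def test_example(publisher, cleanup):
    topic_path = "projects/my-project/topics/my-topic"  # hypothetical
    cleanup.append((publisher.delete_topic, (), {"topic": topic_path}))
    publisher.create_topic(name=topic_path)
    # ... exercise the topic; deletion happens in the fixture's teardown ...
```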
- cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # Each message should be smaller than 10**7 bytes (the server side limit for # PublishRequest), but all messages combined in a PublishRequest should @@ -103,14 +123,14 @@ def test_publish_large_messages(publisher, topic_path, cleanup): # cases well. # Mind that the total PublishRequest size must still be smaller than # 10 * 1024 * 1024 bytes in order to not exceed the max request body size limit. - msg_data = b"x" * (2 * 10 ** 6) + msg_data = b"x" * (2 * 10**6) publisher.batch_settings = types.BatchSettings( max_bytes=11 * 1000 * 1000, # more than the server limit of 10 ** 7 max_latency=2.0, # so that autocommit happens after publishing all messages max_messages=100, ) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) futures = [publisher.publish(topic_path, msg_data, num=str(i)) for i in range(5)] @@ -119,27 +139,33 @@ def test_publish_large_messages(publisher, topic_path, cleanup): # be no "InvalidArgument: request_size is too large" error. for future in futures: result = future.result(timeout=10) - assert isinstance(result, six.string_types) # the message ID + assert isinstance(result, str) # the message ID def test_subscribe_to_messages( - publisher, topic_path, subscriber, subscription_path, cleanup + publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + # Customize topic path to test. + topic_path = topic_path_base + "-subscribe-to-messages" + subscription_path = subscription_path_base + "-subscribe-to-messages" # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. - subscriber.create_subscription(subscription_path, topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Publish some messages. futures = [ publisher.publish(topic_path, b"Wooooo! The claaaaaw!", num=str(index)) - for index in six.moves.range(50) + for index in range(50) ] # Make sure the publish completes. @@ -151,7 +177,7 @@ def test_subscribe_to_messages( # that we got everything at least once. callback = AckCallback() future = subscriber.subscribe(subscription_path, callback) - for second in six.moves.range(10): + for second in range(10): time.sleep(1) # The callback should have fired at least fifty times, but it @@ -166,23 +192,30 @@ def test_subscribe_to_messages( def test_subscribe_to_messages_async_callbacks( - publisher, topic_path, subscriber, subscription_path, cleanup + publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + # Customize topic path to test. + custom_str = "-subscribe-to-messages-async-callback" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. 
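A recurring change throughout these tests is the regenerated client surface: flattened positional arguments are gone, leaving keyword arguments or a single request object/dict. A minimal sketch of the equivalent forms (hypothetical resource IDs):

```python
from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path("my-project", "my-topic")  # hypothetical

# Old surface (removed): publisher.create_topic(topic_path)
publisher.create_topic(name=topic_path)  # keyword form
# Equivalent request form, as used for create_subscription below:
# publisher.create_topic(request={"name": topic_path})
```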
- cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. - subscriber.create_subscription(subscription_path, topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Publish some messages. futures = [ publisher.publish(topic_path, b"Wooooo! The claaaaaw!", num=str(index)) - for index in six.moves.range(2) + for index in range(2) ] # Make sure the publish completes. @@ -195,7 +228,7 @@ def test_subscribe_to_messages_async_callbacks( # Actually open the subscription and hold it open for a few seconds. future = subscriber.subscribe(subscription_path, callback) - for second in six.moves.range(5): + for second in range(5): time.sleep(4) # The callback should have fired at least two times, but it may @@ -216,29 +249,34 @@ def test_subscribe_to_messages_async_callbacks( def test_creating_subscriptions_with_non_default_settings( - publisher, subscriber, project, topic_path, subscription_path, cleanup + publisher, subscriber, project, topic_path_base, subscription_path_base, cleanup ): + # Customize topic path to test. + custom_str = "-creating-subscriptions-with-non-default-settings" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and a subscription, customize the latter's policy - publisher.create_topic(topic_path) - - msg_retention_duration = {"seconds": 911} - expiration_policy = {"ttl": {"seconds": 90210}} - new_subscription = subscriber.create_subscription( - subscription_path, - topic_path, - ack_deadline_seconds=30, - retain_acked_messages=True, - message_retention_duration=msg_retention_duration, - expiration_policy=expiration_policy, - ) + publisher.create_topic(name=topic_path) + + request = { + "name": subscription_path, + "topic": topic_path, + "ack_deadline_seconds": 30, + "retain_acked_messages": True, + "message_retention_duration": {"seconds": 911}, + "expiration_policy": {"ttl": {"seconds": 90210}}, # 1 day, 3810 seconds + } + new_subscription = subscriber.create_subscription(request) # fetch the subscription and check its settings - project_path = subscriber.project_path(project) - subscriptions = subscriber.list_subscriptions(project_path) + project_path = f"projects/{project}" + subscriptions = subscriber.list_subscriptions(project=project_path) subscriptions = [sub for sub in subscriptions if sub.topic == topic_path] assert len(subscriptions) == 1 @@ -248,7 +286,9 @@ def test_creating_subscriptions_with_non_default_settings( assert subscription.ack_deadline_seconds == 30 assert subscription.retain_acked_messages assert subscription.message_retention_duration.seconds == 911 - assert subscription.expiration_policy.ttl.seconds == 90210 + assert subscription.expiration_policy.ttl == datetime.timedelta( + days=1, seconds=3810 + ) def 
test_listing_project_topics(publisher, project, cleanup): @@ -257,11 +297,11 @@ def test_listing_project_topics(publisher, project, cleanup): for i in range(1, 4) ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) - project_path = publisher.project_path(project) - project_topics = publisher.list_topics(project_path) + project_path = f"projects/{project}" + project_topics = publisher.list_topics(project=project_path) project_topics = set(t.name for t in project_topics) # there might be other topics in the project, thus do an "is subset" check @@ -275,8 +315,8 @@ def test_listing_project_subscriptions(publisher, subscriber, project, cleanup): publisher.topic_path(project, "topic-2" + unique_resource_id(".")), ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) # create subscriptions subscription_paths = [ @@ -287,12 +327,14 @@ def test_listing_project_subscriptions(publisher, subscriber, project, cleanup): ] for i, subscription in enumerate(subscription_paths): topic = topic_paths[i % 2] - cleanup.append((subscriber.delete_subscription, subscription)) - subscriber.create_subscription(subscription, topic) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription}) + ) + subscriber.create_subscription(name=subscription, topic=topic) # retrieve subscriptions and check that the list matches the expected - project_path = subscriber.project_path(project) - subscriptions = subscriber.list_subscriptions(project_path) + project_path = f"projects/{project}" + subscriptions = subscriber.list_subscriptions(project=project_path) subscriptions = set(s.name for s in subscriptions) # there might be other subscriptions in the project, thus do an "is subset" check @@ -306,8 +348,8 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): publisher.topic_path(project, "topic-2" + unique_resource_id(".")), ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) # create subscriptions subscription_paths = [ @@ -318,31 +360,36 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): ] for i, subscription in enumerate(subscription_paths): topic = topic_paths[i % 2] - cleanup.append((subscriber.delete_subscription, subscription)) - subscriber.create_subscription(subscription, topic) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription}) + ) + subscriber.create_subscription(name=subscription, topic=topic) # retrieve subscriptions and check that the list matches the expected - subscriptions = publisher.list_topic_subscriptions(topic_paths[0]) - subscriptions = set(subscriptions) + response = publisher.list_topic_subscriptions(topic=topic_paths[0]) + subscriptions = set(response) assert subscriptions == {subscription_paths[0], subscription_paths[2]} -def test_managing_topic_iam_policy(publisher, topic_path, cleanup): - cleanup.append((publisher.delete_topic, topic_path)) - +def test_managing_topic_iam_policy(topic_path_base, cleanup): + publisher = pubsub_v1.PublisherClient(transport="grpc") + topic_path = topic_path_base + 
"-managing-topic-iam-policy" + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # create a topic and customize its policy - publisher.create_topic(topic_path) - topic_policy = publisher.get_iam_policy(topic_path) + publisher.create_topic(name=topic_path) + topic_policy = publisher.get_iam_policy(request={"resource": topic_path}) topic_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) topic_policy.bindings.add( role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] ) - new_policy = publisher.set_iam_policy(topic_path, topic_policy) + new_policy = publisher.set_iam_policy( + request={"resource": topic_path, "policy": topic_policy} + ) # fetch the topic policy again and check its values - topic_policy = publisher.get_iam_policy(topic_path) + topic_policy = publisher.get_iam_policy(request={"resource": topic_path}) assert topic_policy.bindings == new_policy.bindings assert len(topic_policy.bindings) == 2 @@ -355,25 +402,33 @@ def test_managing_topic_iam_policy(publisher, topic_path, cleanup): def test_managing_subscription_iam_policy( - publisher, subscriber, topic_path, subscription_path, cleanup + publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + custom_str = "-managing-subscription-iam-policy" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and a subscription, customize the latter's policy - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) - sub_policy = subscriber.get_iam_policy(subscription_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) + sub_policy = subscriber.get_iam_policy(request={"resource": subscription_path}) sub_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) sub_policy.bindings.add( role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] ) - new_policy = subscriber.set_iam_policy(subscription_path, sub_policy) + new_policy = subscriber.set_iam_policy( + request={"resource": subscription_path, "policy": sub_policy} + ) # fetch the subscription policy again and check its values - sub_policy = subscriber.get_iam_policy(subscription_path) + sub_policy = subscriber.get_iam_policy(request={"resource": subscription_path}) assert sub_policy.bindings == new_policy.bindings assert len(sub_policy.bindings) == 2 @@ -385,8 +440,9 @@ def test_managing_subscription_iam_policy( assert bindings[1].members == ["group:cloud-logs@google.com"] +@pytest.mark.parametrize("transport", ["grpc", "rest"]) def test_subscriber_not_leaking_open_sockets( - publisher, topic_path, subscription_path, cleanup + publisher, topic_path_base, subscription_path_base, cleanup, transport ): # Make sure the topic and the supscription get deleted. # NOTE: Since subscriber client will be closed in the test, we should not @@ -395,22 +451,24 @@ def test_subscriber_not_leaking_open_sockets( # Also, since the client will get closed, we need another subscriber client # to clean up the subscription. 
We also need to make sure that the auxiliary # subscriber releases the sockets, too. - subscriber = pubsub_v1.SubscriberClient() - subscriber_2 = pubsub_v1.SubscriberClient() - cleanup.append((subscriber_2.delete_subscription, subscription_path)) - - def one_arg_close(subscriber): # the cleanup helper expects exactly one argument - subscriber.close() - - cleanup.append((one_arg_close, subscriber_2)) - cleanup.append((publisher.delete_topic, topic_path)) + custom_str = "-not-leaking-open-sockets" + subscription_path = subscription_path_base + custom_str + topic_path = topic_path_base + custom_str + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + subscriber_2 = pubsub_v1.SubscriberClient(transport="grpc") + + cleanup.append( + (subscriber_2.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((subscriber_2.close, (), {})) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # Create topic before starting to track connection count (any sockets opened # by the publisher client are not counted by this test). - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) current_process = psutil.Process() - conn_count_start = len(current_process.connections()) + conn_count_start = len(current_process.net_connections()) # Publish a few messages, then synchronously pull them and check that # no sockets are leaked. @@ -419,33 +477,42 @@ def one_arg_close(subscriber): # the cleanup helper expects exactly one argumen # Publish a few messages, wait for the publish to succeed. publish_futures = [ - publisher.publish(topic_path, u"message {}".format(i).encode()) + publisher.publish(topic_path, "message {}".format(i).encode()) for i in range(1, 4) ] for future in publish_futures: future.result() # Synchronously pull messages. - response = subscriber.pull(subscription_path, max_messages=3) + response = subscriber.pull(subscription=subscription_path, max_messages=3) assert len(response.received_messages) == 3 - conn_count_end = len(current_process.connections()) - assert conn_count_end == conn_count_start + conn_count_end = len(current_process.net_connections()) + + # To avoid flakiness, use <= in the assertion, since on rare occasions additional + # sockets are closed, causing the == assertion to fail. + # https://github.com/googleapis/python-pubsub/issues/483#issuecomment-910122086 + assert conn_count_end <= conn_count_start def test_synchronous_pull_no_deadline_error_if_no_messages( - publisher, topic_path, subscriber, subscription_path, cleanup + publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): + custom_str = "-synchronous-pull-deadline-error-if-no-messages" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and subscribe to it.
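The socket-leak test above hinges on psutil's per-process connection listing; a stripped-down sketch of the pattern:

```python
# Count this process's open sockets before and after exercising client code.
# "<=" rather than "==", as noted above, since unrelated sockets may also
# close while the test runs.
import psutil

proc = psutil.Process()  # the current process
conn_count_start = len(proc.net_connections())
# ... open a subscriber, pull messages, close the subscriber ...
conn_count_end = len(proc.net_connections())
assert conn_count_end <= conn_count_start
```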
- publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) try: - response = subscriber.pull(subscription_path, max_messages=2) + response = subscriber.pull(subscription=subscription_path, max_messages=2) except core_exceptions.DeadlineExceeded: pytest.fail( "Unexpected DeadlineExceeded error on synchronous pull when no " @@ -457,15 +524,21 @@ class TestStreamingPull(object): def test_streaming_pull_callback_error_propagation( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + custom_str = "-streaming-pull-callback-error-propagation" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # publish a message and wait until published future = publisher.publish(topic_path, b"hello!") @@ -482,21 +555,33 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) + @typed_flaky def test_streaming_pull_ack_deadline( - self, publisher, subscriber, project, topic_path, subscription_path, cleanup + self, + publisher, + project, + topic_path_base, + subscription_path_base, + cleanup, ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + custom_str = "-streaming-pull-ack-deadline" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and a subscription, then subscribe to the topic. This # must happen before the messages are published. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published.
subscriber.create_subscription( - subscription_path, topic_path, ack_deadline_seconds=45 + name=subscription_path, topic=topic_path, ack_deadline_seconds=45 ) # publish some messages and wait for completion @@ -531,16 +616,23 @@ def test_streaming_pull_ack_deadline( finally: subscription_future.cancel() + @typed_flaky def test_streaming_pull_max_messages( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + custom_str = "-streaming-pull-max-messages" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) batch_sizes = (7, 4, 8, 2, 10, 1, 3, 8, 6, 1) # total: 50 _publish_messages(publisher, topic_path, batch_sizes=batch_sizes) @@ -585,6 +677,87 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown + @typed_flaky + def test_streaming_pull_blocking_shutdown( + self, publisher, topic_path_base, subscription_path_base, cleanup + ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + custom_str = "-streaming-pull-blocking-shutdown" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + + # The ACK-s are only persisted if *all* messages published in the same batch + # are ACK-ed. We thus publish each message in its own batch so that the backend + # treats all messages' ACKs independently of each other. + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) + _publish_messages(publisher, topic_path, batch_sizes=[1] * 10) + + # Artificially delay message processing, gracefully shut down the streaming pull + # in the meantime, then verify that those messages were nevertheless processed. + processed_messages = [] + + def callback(message): + time.sleep(15) + processed_messages.append(message.data) + message.ack() + + # Flow control limits should exceed the number of worker threads, so that some + # of the messages will be blocked on waiting for free scheduler threads.
+ flow_control = pubsub_v1.types.FlowControl(max_messages=5) + executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) + scheduler = pubsub_v1.subscriber.scheduler.ThreadScheduler(executor=executor) + subscription_future = subscriber.subscribe( + subscription_path, + callback=callback, + flow_control=flow_control, + scheduler=scheduler, + await_callbacks_on_shutdown=True, + ) + + try: + subscription_future.result(timeout=10) # less than the sleep in callback + except exceptions.TimeoutError: + subscription_future.cancel() + subscription_future.result() # block until shutdown completes + + # Blocking on shutdown should have waited for the already executing + # callbacks to finish. + assert len(processed_messages) == 3 + + # The messages that were not processed should have been NACK-ed and we should + # receive them again quite soon. + all_done = threading.Barrier(7 + 1, timeout=5) # +1 because of the main thread + remaining = [] + + def callback2(message): + remaining.append(message.data) + message.ack() + all_done.wait() + + subscription_future = subscriber.subscribe( + subscription_path, callback=callback2, await_callbacks_on_shutdown=False + ) + + try: + all_done.wait() + except threading.BrokenBarrierError: # PRAGMA: no cover + pytest.fail("The remaining messages have not been re-delivered in time.") + finally: + subscription_future.cancel() + subscription_future.result() # block until shutdown completes + + # There should be 7 messages left that were not yet processed and none of them + # should be a message that should have already been successfully processed in the + # first streaming pull. + assert len(remaining) == 7 + assert not (set(processed_messages) & set(remaining)) # no re-delivery + @pytest.mark.skipif( "KOKORO_GFILE_DIR" not in os.environ, @@ -592,16 +765,21 @@ def test_streaming_pull_max_messages, ) class TestBasicRBAC(object): def test_streaming_pull_subscriber_permissions_sufficient( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscription_path_base, cleanup ): - + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + custom_str = "-streaming-pull-subscriber-permissions-sufficient" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # A service account granting only the pubsub.subscriber role must be used. filename = os.path.join( @@ -627,16 +805,20 @@ def test_streaming_pull_subscriber_permissions_sufficient( future.cancel() def test_publisher_role_can_publish_messages( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): - + custom_str = "-publisher-role-can-publish-messages" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted.
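The redelivery check in the blocking-shutdown test above coordinates the callback threads and the main thread with a `threading.Barrier`; a self-contained sketch of that synchronization (the message count is illustrative):

```python
import threading

expected = 7
# One extra party for the main thread; wait() raises BrokenBarrierError in
# every waiting thread if all parties do not arrive within the timeout.
all_done = threading.Barrier(expected + 1, timeout=5)
received = []


def callback(data):
    received.append(data)
    all_done.wait()


# Stand-ins for the subscriber's worker threads delivering messages.
threads = [
    threading.Thread(target=callback, args=(f"msg-{i}",)) for i in range(expected)
]
for t in threads:
    t.start()

try:
    all_done.wait()  # the main thread joins the barrier
except threading.BrokenBarrierError:
    raise AssertionError("messages were not delivered in time")
assert len(received) == expected
```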
- cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and subscribe to it. - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Create a publisher client with only the publisher role. filename = os.path.join( @@ -646,27 +828,38 @@ def test_publisher_role_can_publish_messages( _publish_messages(publisher_only_client, topic_path, batch_sizes=[2]) - response = subscriber.pull(subscription_path, max_messages=2) + response = subscriber.pull(subscription=subscription_path, max_messages=2) assert len(response.received_messages) == 2 @pytest.mark.skip( "Snapshot creation is not instant on the backend, causing test flakiness." ) def test_snapshot_seek_subscriber_permissions_sufficient( - self, project, publisher, topic_path, subscriber, subscription_path, cleanup + self, + project, + publisher, + topic_path_base, + subscriber, + subscription_path_base, + cleanup, ): + custom_str = "-snapshot-seek-subscriber-permissions-sufficient" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str snapshot_name = "snap" + unique_resource_id("-") snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) - cleanup.append((subscriber.delete_snapshot, snapshot_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((subscriber.delete_snapshot, (), {"snapshot": snapshot_path})) # Create a topic and subscribe to it. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) subscriber.create_subscription( - subscription_path, topic_path, retain_acked_messages=True + name=subscription_path, topic=topic_path, retain_acked_messages=True ) # A service account granting only the pubsub.subscriber role must be used. @@ -677,31 +870,34 @@ def test_snapshot_seek_subscriber_permissions_sufficient( # Publish two messages and create a snapshot in between. _publish_messages(publisher, topic_path, batch_sizes=[1]) - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 - subscriber.create_snapshot(snapshot_path, subscription_path) + subscriber.create_snapshot(name=snapshot_path, subscription=subscription_path) _publish_messages(publisher, topic_path, batch_sizes=[1]) - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 # A subscriber-only client should be allowed to seek to a snapshot.
- subscriber_only_client.seek(subscription_path, snapshot=snapshot_path) + seek_request = gapic_types.SeekRequest( + subscription=subscription_path, snapshot=snapshot_path + ) + subscriber_only_client.seek(seek_request) # We should receive one message again, since we sought back to a snapshot. - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 def test_viewer_role_can_list_resources( - self, project, publisher, topic_path, subscriber, cleanup + self, project, publisher, topic_path_base, subscriber, cleanup ): project_path = "projects/" + project - + topic_path = topic_path_base + "-viewer-role-can-list-resources" # Make sure the created topic gets deleted. - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # A service account granting only the pubsub.viewer role must be used. filename = os.path.join( @@ -712,21 +908,39 @@ def test_viewer_role_can_list_resources( # The following operations should not raise permission denied errors. # NOTE: At least one topic exists. - topic = next(iter(viewer_only_publisher.list_topics(project_path))) - next(iter(viewer_only_publisher.list_topic_subscriptions(topic.name)), None) - next(iter(viewer_only_subscriber.list_subscriptions(project_path)), None) - next(iter(viewer_only_subscriber.list_snapshots(project_path)), None) + topic = next(iter(viewer_only_publisher.list_topics(project=project_path))) + + next( + iter(viewer_only_publisher.list_topic_subscriptions(topic=topic.name)), None + ) + + next( + iter(viewer_only_subscriber.list_subscriptions(project=project_path)), None + ) + + next(iter(viewer_only_subscriber.list_snapshots(project=project_path)), None) def test_editor_role_can_create_resources( - self, project, publisher, topic_path, subscriber, subscription_path, cleanup + self, + project, + publisher, + topic_path_base, + subscriber, + subscription_path_base, + cleanup, ): + custom_str = "-editor-role-can-create-resources" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str snapshot_name = "snap" + unique_resource_id("-") snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) # Make sure the created resources get deleted. - cleanup.append((subscriber.delete_snapshot, snapshot_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_snapshot, (), {"snapshot": snapshot_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # A service account granting only the pubsub.editor role must be used. filename = os.path.join( @@ -736,9 +950,11 @@ def test_editor_role_can_create_resources( editor_publisher = type(publisher).from_service_account_file(filename) # The following operations should not raise permission denied errors. 
- editor_publisher.create_topic(topic_path) - editor_subscriber.create_subscription(subscription_path, topic_path) - editor_subscriber.create_snapshot(snapshot_path, subscription_path) + editor_publisher.create_topic(name=topic_path) + editor_subscriber.create_subscription(name=subscription_path, topic=topic_path) + editor_subscriber.create_snapshot( + name=snapshot_path, subscription=subscription_path + ) def _publish_messages(publisher, topic_path, batch_sizes): @@ -746,8 +962,8 @@ def _publish_messages(publisher, topic_path, batch_sizes): publish_futures = [] msg_counter = itertools.count(start=1) - for batch_size in batch_sizes: - msg_batch = _make_messages(count=batch_size) + for batch_num, batch_size in enumerate(batch_sizes, start=1): + msg_batch = _make_messages(count=batch_size, batch_num=batch_num) for msg in msg_batch: future = publisher.publish(topic_path, msg, seq_num=str(next(msg_counter))) publish_futures.append(future) @@ -758,9 +974,10 @@ def _publish_messages(publisher, topic_path, batch_sizes): future.result(timeout=30) -def _make_messages(count): +def _make_messages(count, batch_num): messages = [ - u"message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) + f"message {i}/{count} of batch {batch_num}".encode("utf-8") + for i in range(1, count + 1) ] return messages diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index e69de29bb..cbf94b283 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 000000000..cbf94b283 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/pubsub_v1/__init__.py b/tests/unit/gapic/pubsub_v1/__init__.py new file mode 100644 index 000000000..cbf94b283 --- /dev/null +++ b/tests/unit/gapic/pubsub_v1/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/pubsub_v1/test_publisher.py b/tests/unit/gapic/pubsub_v1/test_publisher.py new file mode 100644 index 000000000..978021fcd --- /dev/null +++ b/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -0,0 +1,9937 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.publisher import PublisherAsyncClient +from google.pubsub_v1.services.publisher import PublisherClient +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.services.publisher import transports +from google.pubsub_v1.types import pubsub +from google.pubsub_v1.types import schema +import google.auth + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def 
mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PublisherClient._get_default_mtls_endpoint(None) is None + assert PublisherClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + PublisherClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PublisherClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PublisherClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert PublisherClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PublisherClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PublisherClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + PublisherClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert PublisherClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PublisherClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PublisherClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PublisherClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PublisherClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert PublisherClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + PublisherClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
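All of the environment-driven cases above lean on `mock.patch.dict`, which restores `os.environ` when the context exits; a generic sketch of the pattern:

```python
import os
from unittest import mock


def read_mtls_mode():
    return os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")


# Temporarily override a single variable; os.environ is restored on exit.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
    assert read_mtls_mode() == "never"

# clear=True empties the mapping first, as in test case 9 above.
with mock.patch.dict(os.environ, clear=True):
    assert read_mtls_mode() == "auto"
```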
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert PublisherClient._use_client_cert_effective() is False + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PublisherClient._get_client_cert_source(None, False) is None + assert ( + PublisherClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + PublisherClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PublisherClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + PublisherClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), +) +@mock.patch.object( + PublisherAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PublisherClient._DEFAULT_UNIVERSE + default_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PublisherClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PublisherClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == PublisherClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PublisherClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PublisherClient._get_api_endpoint(None, None, default_universe, "always") + == PublisherClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PublisherClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PublisherClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PublisherClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PublisherClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PublisherClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PublisherClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PublisherClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PublisherClient._get_universe_domain(None, None) + == PublisherClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PublisherClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PublisherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PublisherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PublisherClient, "grpc"), + (PublisherAsyncClient, "grpc_asyncio"), + (PublisherClient, "rest"), + ], +) +def test_publisher_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PublisherGrpcTransport, "grpc"), + (transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PublisherRestTransport, "rest"), + ], +) +def test_publisher_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (PublisherClient, "grpc"), + (PublisherAsyncClient, "grpc_asyncio"), + (PublisherClient, "rest"), + ], +) +def test_publisher_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +def test_publisher_client_get_transport_class(): + transport = PublisherClient.get_transport_class() + available_transports = [ + transports.PublisherGrpcTransport, + transports.PublisherRestTransport, + ] + assert transport in available_transports + + transport = PublisherClient.get_transport_class("grpc") + assert transport == transports.PublisherGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (PublisherClient, transports.PublisherRestTransport, "rest"), + ], +) +@mock.patch.object( + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), +) +@mock.patch.object( + PublisherAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), +) +def test_publisher_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PublisherClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PublisherClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
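+    # (GOOGLE_API_USE_MTLS_ENDPOINT accepts "never", "auto", or "always"; each
+    # value is exercised below, and an unsupported value raises
+    # MutualTLSChannelError.)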
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc", "true"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (PublisherClient, transports.PublisherGrpcTransport, "grpc", "false"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (PublisherClient, transports.PublisherRestTransport, "rest", "true"), + (PublisherClient, 
transports.PublisherRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), +) +@mock.patch.object( + PublisherAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_publisher_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
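+    # With neither an explicit client_cert_source nor a default ADC client
+    # cert available, no mTLS channel can be built, so the plain default
+    # endpoint is expected regardless of GOOGLE_API_USE_CLIENT_CERTIFICATE.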
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) +@mock.patch.object( + PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient) +) +@mock.patch.object( + PublisherAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PublisherAsyncClient), +) +def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+            {
+                "version": 1,
+                "cert_configs": {},
+            },
+            None,
+        ),
+    ]
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        for config_data, expected_cert_source in test_cases:
+            env = os.environ.copy()
+            env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None)
+            with mock.patch.dict(os.environ, env, clear=True):
+                config_filename = "mock_certificate_config.json"
+                config_file_content = json.dumps(config_data)
+                m = mock.mock_open(read_data=config_file_content)
+                with mock.patch("builtins.open", m):
+                    with mock.patch.dict(
+                        os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename}
+                    ):
+                        mock_api_endpoint = "foo"
+                        options = client_options.ClientOptions(
+                            client_cert_source=mock_client_cert_source,
+                            api_endpoint=mock_api_endpoint,
+                        )
+                        (
+                            api_endpoint,
+                            cert_source,
+                        ) = client_class.get_mtls_endpoint_and_cert_source(options)
+                        assert api_endpoint == mock_api_endpoint
+                        assert cert_source is expected_cert_source
+
+    # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is set to the empty string.
+    test_cases = [
+        (
+            # With workloads present in config, mTLS is enabled.
+            {
+                "version": 1,
+                "cert_configs": {
+                    "workload": {
+                        "cert_path": "path/to/cert/file",
+                        "key_path": "path/to/key/file",
+                    }
+                },
+            },
+            mock_client_cert_source,
+        ),
+        (
+            # With workloads not present in config, mTLS is disabled.
+            {
+                "version": 1,
+                "cert_configs": {},
+            },
+            None,
+        ),
+    ]
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        for config_data, expected_cert_source in test_cases:
+            env = os.environ.copy()
+            # Unlike the unset case above, keep the variable present but empty.
+            env["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = ""
+            with mock.patch.dict(os.environ, env, clear=True):
+                config_filename = "mock_certificate_config.json"
+                config_file_content = json.dumps(config_data)
+                m = mock.mock_open(read_data=config_file_content)
+                with mock.patch("builtins.open", m):
+                    with mock.patch.dict(
+                        os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename}
+                    ):
+                        mock_api_endpoint = "foo"
+                        options = client_options.ClientOptions(
+                            client_cert_source=mock_client_cert_source,
+                            api_endpoint=mock_api_endpoint,
+                        )
+                        (
+                            api_endpoint,
+                            cert_source,
+                        ) = client_class.get_mtls_endpoint_and_cert_source(options)
+                        assert api_endpoint == mock_api_endpoint
+                        assert cert_source is expected_cert_source
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+        assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        with mock.patch(
+            "google.auth.transport.mtls.has_default_client_cert_source",
+            return_value=False,
+        ):
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source()
+            assert api_endpoint == client_class.DEFAULT_ENDPOINT
+            assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists.
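+    # In "auto" mode the ADC-discovered cert drives the choice: with a default
+    # cert present, DEFAULT_MTLS_ENDPOINT and that cert source are expected.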
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) +@mock.patch.object( + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), +) +@mock.patch.object( + PublisherAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), +) +def test_publisher_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PublisherClient._DEFAULT_UNIVERSE + default_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
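+    # The hasattr() guard below keeps the test compatible with ClientOptions
+    # versions that predate the universe_domain attribute; the expected
+    # endpoint and universe are branched on the same flag.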
+    options = client_options.ClientOptions()
+    universe_exists = hasattr(options, "universe_domain")
+    if universe_exists:
+        options = client_options.ClientOptions(universe_domain=mock_universe)
+    client = client_class(
+        client_options=options, credentials=ga_credentials.AnonymousCredentials()
+    )
+    assert client.api_endpoint == (
+        mock_endpoint if universe_exists else default_endpoint
+    )
+    assert client.universe_domain == (
+        mock_universe if universe_exists else default_universe
+    )
+
+    # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never",
+    # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint.
+    options = client_options.ClientOptions()
+    if hasattr(options, "universe_domain"):
+        delattr(options, "universe_domain")
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+        client = client_class(
+            client_options=options, credentials=ga_credentials.AnonymousCredentials()
+        )
+        assert client.api_endpoint == default_endpoint
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name",
+    [
+        (PublisherClient, transports.PublisherGrpcTransport, "grpc"),
+        (
+            PublisherAsyncClient,
+            transports.PublisherGrpcAsyncIOTransport,
+            "grpc_asyncio",
+        ),
+        (PublisherClient, transports.PublisherRestTransport, "rest"),
+    ],
+)
+def test_publisher_client_client_options_scopes(
+    client_class, transport_class, transport_name
+):
+    # Check the case scopes are provided.
+    options = client_options.ClientOptions(
+        scopes=["1", "2"],
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=["1", "2"],
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,grpc_helpers",
+    [
+        (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers),
+        (
+            PublisherAsyncClient,
+            transports.PublisherGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            grpc_helpers_async,
+        ),
+        (PublisherClient, transports.PublisherRestTransport, "rest", None),
+    ],
+)
+def test_publisher_client_client_options_credentials_file(
+    client_class, transport_class, transport_name, grpc_helpers
+):
+    # Check the case credentials file is provided.
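+    # The path should be handed to the transport untouched: credentials stays
+    # None while credentials_file carries the filename.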
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_publisher_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PublisherClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_publisher_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
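+    # Both google.auth.load_credentials_from_file and ADC are mocked below so
+    # the assertion can confirm that the file-based credentials (not the ADC
+    # ones) reach grpc_helpers.create_channel, together with the Pub/Sub
+    # default scopes and the gRPC channel options.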
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Topic, + dict, + ], +) +def test_create_topic(request_type, transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + response = client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.Topic() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +def test_create_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + ) + + +def test_create_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_topic] = mock_rpc + request = {} + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_topic + ] = mock_rpc + + request = {} + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.Topic +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + response = await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. 
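+        # (The stub returned a FakeUnaryUnaryCall, so `response` has already
+        # been awaited into a plain pubsub.Topic at this point.)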
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.Topic() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.asyncio +async def test_create_topic_async_from_dict(): + await test_create_topic_async(request_type=dict) + + +def test_create_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Topic() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_topic_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Topic() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_create_topic_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_topic( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_create_topic_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
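+    # (Mixing the positional request object with the flattened `name` keyword
+    # is ambiguous, so the client rejects the call with a ValueError.)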
+    with pytest.raises(ValueError):
+        client.create_topic(
+            pubsub.Topic(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_topic_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_topic(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_topic_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_topic(
+            pubsub.Topic(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        pubsub.UpdateTopicRequest,
+        dict,
+    ],
+)
+def test_update_topic(request_type, transport: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.Topic(
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            satisfies_pzs=True,
+            state=pubsub.Topic.State.ACTIVE,
+        )
+        response = client.update_topic(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = pubsub.UpdateTopicRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pubsub.Topic)
+    assert response.name == "name_value"
+    assert response.kms_key_name == "kms_key_name_value"
+    assert response.satisfies_pzs is True
+    assert response.state == pubsub.Topic.State.ACTIVE
+
+
+def test_update_topic_non_empty_request_with_auto_populated_field():
+    # This test is a coverage failsafe to make sure that UUID4 fields are
+    # automatically populated, according to AIP-4235, with non-empty requests.
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Populate all string fields in the request which are not UUID4
+    # since we want to check that UUID4 are populated automatically
+    # if they meet the requirements of AIP 4235.
+    request = pubsub.UpdateTopicRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_topic), "__call__") as call:
+        call.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expect a string.
+ ) + client.update_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateTopicRequest() + + +def test_update_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_topic] = mock_rpc + request = {} + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_topic + ] = mock_rpc + + request = {} + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + response = await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. 
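+        # args[0] is compared against a freshly built, empty UpdateTopicRequest:
+        # the client is expected to coerce request_type() into the canonical
+        # proto before invoking the stub.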
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.UpdateTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.asyncio +async def test_update_topic_async_from_dict(): + await test_update_topic_async(request_type=dict) + + +def test_update_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + + request.topic.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_topic_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + + request.topic.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic.name=name_value", + ) in kw["metadata"] + + +def test_update_topic_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_topic( + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
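+        # Both flattened kwargs should land on the request proto: `topic` as
+        # the nested Topic message and `update_mask` as a FieldMask with the
+        # given paths.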
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = pubsub.Topic(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_topic_flattened_error():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_topic(
+            pubsub.UpdateTopicRequest(),
+            topic=pubsub.Topic(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_topic_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.update_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_topic(
+            topic=pubsub.Topic(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = pubsub.Topic(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_topic_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_topic(
+            pubsub.UpdateTopicRequest(),
+            topic=pubsub.Topic(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        pubsub.PublishRequest,
+        dict,
+    ],
+)
+def test_publish(request_type, transport: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.publish), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.PublishResponse(
+            message_ids=["message_ids_value"],
+        )
+        response = client.publish(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = pubsub.PublishRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pubsub.PublishResponse) + assert response.message_ids == ["message_ids_value"] + + +def test_publish_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.PublishRequest( + topic="topic_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.publish(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PublishRequest( + topic="topic_value", + ) + + +def test_publish_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.publish in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.publish] = mock_rpc + request = {} + client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.publish(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_publish_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.publish + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.publish + ] = mock_rpc + + request = {} + await client.publish(request) + + # Establish that the underlying gRPC stub method was called. 
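+        # Re-wrapping on each call would rebuild the method's retry/timeout
+        # configuration every time; the cached entry in _wrapped_methods is
+        # expected to absorb the second call without touching wrap_method again.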
+ assert mock_rpc.call_count == 1 + + await client.publish(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_publish_async( + transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) + ) + response = await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.PublishRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + assert response.message_ids == ["message_ids_value"] + + +@pytest.mark.asyncio +async def test_publish_async_from_dict(): + await test_publish_async(request_type=dict) + + +def test_publish_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PublishRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + call.return_value = pubsub.PublishResponse() + client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic=topic_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_publish_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PublishRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse() + ) + await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic=topic_value", + ) in kw["metadata"] + + +def test_publish_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
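+    # publish() supports flattening of `topic` and `messages`; the assertions
+    # below check that both keyword arguments were folded into a single
+    # PublishRequest.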
+    with mock.patch.object(type(client.transport.publish), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.PublishResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.publish(
+            topic="topic_value",
+            messages=[pubsub.PubsubMessage(data=b"data_blob")],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+        arg = args[0].messages
+        mock_val = [pubsub.PubsubMessage(data=b"data_blob")]
+        assert arg == mock_val
+
+
+def test_publish_flattened_error():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.publish(
+            pubsub.PublishRequest(),
+            topic="topic_value",
+            messages=[pubsub.PubsubMessage(data=b"data_blob")],
+        )
+
+
+@pytest.mark.asyncio
+async def test_publish_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.publish), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            pubsub.PublishResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.publish(
+            topic="topic_value",
+            messages=[pubsub.PubsubMessage(data=b"data_blob")],
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+        arg = args[0].messages
+        mock_val = [pubsub.PubsubMessage(data=b"data_blob")]
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_publish_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.publish(
+            pubsub.PublishRequest(),
+            topic="topic_value",
+            messages=[pubsub.PubsubMessage(data=b"data_blob")],
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        pubsub.GetTopicRequest,
+        dict,
+    ],
+)
+def test_get_topic(request_type, transport: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.Topic(
+            name="name_value",
+            kms_key_name="kms_key_name_value",
+            satisfies_pzs=True,
+            state=pubsub.Topic.State.ACTIVE,
+        )
+        response = client.get_topic(request)
+
+        # Establish that the underlying gRPC stub method was called.
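+        # (The request_type parametrization means the input may have been a
+        # plain dict; either way the stub should see a GetTopicRequest.)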
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.GetTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +def test_get_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.GetTopicRequest( + topic="topic_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetTopicRequest( + topic="topic_value", + ) + + +def test_get_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_topic] = mock_rpc + request = {} + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. 
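+        # A second call below must hit the cached wrapper: the mocked RPC's
+        # call count grows while wrapper_fn is never invoked again.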
+ assert mock_rpc.call_count == 1 + + client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_topic_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_topic + ] = mock_rpc + + request = {} + await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + response = await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.GetTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.asyncio +async def test_get_topic_async_from_dict(): + await test_get_topic_async(request_type=dict) + + +def test_get_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetTopicRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. 
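+        # Each mock_calls entry unpacks as (name, args, kwargs); the request
+        # proto is always the first positional argument.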
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "topic=topic_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_topic_field_headers_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = pubsub.GetTopicRequest()
+
+    request.topic = "topic_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_topic), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic())
+        await client.get_topic(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "topic=topic_value",
+    ) in kw["metadata"]
+
+
+def test_get_topic_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.Topic()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_topic(
+            topic="topic_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+def test_get_topic_flattened_error():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_topic(
+            pubsub.GetTopicRequest(),
+            topic="topic_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_topic_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_topic(
+            topic="topic_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_topic_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.get_topic( + pubsub.GetTopicRequest(), + topic="topic_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicsRequest, + dict, + ], +) +def test_list_topics(request_type, transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.ListTopicsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListTopicsRequest( + project="project_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_topics(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicsRequest( + project="project_value", + page_token="page_token_value", + ) + + +def test_list_topics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_topics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_topics] = mock_rpc + request = {} + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_topics + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_topics + ] = mock_rpc + + request = {} + await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topics_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.ListTopicsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_topics_async_from_dict(): + await test_list_topics_async(request_type=dict) + + +def test_list_topics_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicsRequest() + + request.project = "project_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value = pubsub.ListTopicsResponse() + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
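+    # The routing header travels in the call's metadata kwarg as an
+    # ("x-goog-request-params", ...) tuple.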
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project=project_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_topics_field_headers_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = pubsub.ListTopicsRequest()
+
+    request.project = "project_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_topics), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            pubsub.ListTopicsResponse()
+        )
+        await client.list_topics(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project=project_value",
+    ) in kw["metadata"]
+
+
+def test_list_topics_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_topics), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.ListTopicsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_topics(
+            project="project_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project
+        mock_val = "project_value"
+        assert arg == mock_val
+
+
+def test_list_topics_flattened_error():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_topics(
+            pubsub.ListTopicsRequest(),
+            project="project_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_topics_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_topics), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            pubsub.ListTopicsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_topics(
+            project="project_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project
+        mock_val = "project_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_topics_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.list_topics( + pubsub.ListTopicsRequest(), + project="project_value", + ) + + +def test_list_topics_pager(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token="abc", + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), + ) + pager = client.list_topics(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Topic) for i in results) + + +def test_list_topics_pages(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token="abc", + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + RuntimeError, + ) + pages = list(client.list_topics(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topics_async_pager(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
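+        # The trailing RuntimeError is a sentinel: if the pager requests more
+        # pages than are mocked here, it raises RuntimeError rather than
+        # exhausting the side_effect with a confusing StopIteration.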
+ call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token="abc", + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_topics( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Topic) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topics_async_pages(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token="abc", + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_topics(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSubscriptionsRequest, + dict, + ], +) +def test_list_topic_subscriptions(request_type, transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + response = client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.ListTopicSubscriptionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSubscriptionsPager) + assert response.subscriptions == ["subscriptions_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_subscriptions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
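+    # (AIP 4235 concerns auto-populated request IDs; every field set here is
+    # explicit, so the assertion below verifies they are forwarded unchanged.)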
+ client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListTopicSubscriptionsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_topic_subscriptions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSubscriptionsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + +def test_list_topic_subscriptions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_subscriptions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_subscriptions + ] = mock_rpc + request = {} + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_topic_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_topic_subscriptions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_topic_subscriptions + ] = mock_rpc + + request = {} + await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_topic_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.ListTopicSubscriptionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) + assert response.subscriptions == ["subscriptions_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_from_dict(): + await test_list_topic_subscriptions_async(request_type=dict) + + +def test_list_topic_subscriptions_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSubscriptionsRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSubscriptionsResponse() + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic=topic_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSubscriptionsRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse() + ) + await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
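+        # For the async transport only a non-zero call count is asserted,
+        # unlike the exact count used in the sync variant.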
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "topic=topic_value",
+    ) in kw["metadata"]
+
+
+def test_list_topic_subscriptions_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_topic_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.ListTopicSubscriptionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_topic_subscriptions(
+            topic="topic_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+def test_list_topic_subscriptions_flattened_error():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_topic_subscriptions(
+            pubsub.ListTopicSubscriptionsRequest(),
+            topic="topic_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_topic_subscriptions_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_topic_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            pubsub.ListTopicSubscriptionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_topic_subscriptions(
+            topic="topic_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_topic_subscriptions_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_topic_subscriptions(
+            pubsub.ListTopicSubscriptionsRequest(),
+            topic="topic_value",
+        )
+
+
+def test_list_topic_subscriptions_pager(transport_name: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_topic_subscriptions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
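+        # One page is deliberately empty (but still carries a next_page_token)
+        # to verify the pager keeps fetching until the token is exhausted.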
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), + ) + pager = client.list_topic_subscriptions( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_topic_subscriptions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pager(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_topic_subscriptions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pages(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_topic_subscriptions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSnapshotsRequest, + dict, + ], +) +def test_list_topic_snapshots(request_type, transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + response = client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.ListTopicSnapshotsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSnapshotsPager) + assert response.snapshots == ["snapshots_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_snapshots_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListTopicSnapshotsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_topic_snapshots(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSnapshotsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + +def test_list_topic_snapshots_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_snapshots in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_snapshots + ] = mock_rpc + request = {} + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_topic_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_topic_snapshots + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_topic_snapshots + ] = mock_rpc + + request = {} + await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_topic_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
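+        # FakeUnaryUnaryCall makes the mocked stub awaitable: awaiting it
+        # resolves to the wrapped response message, as a real grpc.aio
+        # unary-unary call would.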
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.ListTopicSnapshotsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) + assert response.snapshots == ["snapshots_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_from_dict(): + await test_list_topic_snapshots_async(request_type=dict) + + +def test_list_topic_snapshots_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSnapshotsResponse() + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic=topic_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse() + ) + await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "topic=topic_value", + ) in kw["metadata"] + + +def test_list_topic_snapshots_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topic_snapshots( + topic="topic_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+def test_list_topic_snapshots_flattened_error():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_topic_snapshots(
+            pubsub.ListTopicSnapshotsRequest(),
+            topic="topic_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_topic_snapshots_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_topic_snapshots), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            pubsub.ListTopicSnapshotsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_topic_snapshots(
+            topic="topic_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_topic_snapshots_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_topic_snapshots(
+            pubsub.ListTopicSnapshotsRequest(),
+            topic="topic_value",
+        )
+
+
+def test_list_topic_snapshots_pager(transport_name: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_topic_snapshots), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[
+                    str(),
+                    str(),
+                    str(),
+                ],
+                next_page_token="abc",
+            ),
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[],
+                next_page_token="def",
+            ),
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[
+                    str(),
+                ],
+                next_page_token="ghi",
+            ),
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[
+                    str(),
+                    str(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)),
+        )
+        pager = client.list_topic_snapshots(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, str) for i in results)
+
+
+def test_list_topic_snapshots_pages(transport_name: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
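+    # Iterating pager.pages yields the raw per-page responses, so each page's
+    # next_page_token can be checked against the mocked sequence.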
+ with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_topic_snapshots(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pager(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_topic_snapshots( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pages(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_topic_snapshots(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteTopicRequest, + dict, + ], +) +def test_delete_topic(request_type, transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.DeleteTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DeleteTopicRequest( + topic="topic_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteTopicRequest( + topic="topic_value", + ) + + +def test_delete_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_topic] = mock_rpc + request = {} + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_topic + ] = mock_rpc + + request = {} + await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.DeleteTopicRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_topic_async_from_dict(): + await test_delete_topic_async(request_type=dict) + + +def test_delete_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteTopicRequest() + + request.topic = "topic_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value = None + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
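+    # Note: "x-goog-request-params" is the standard GAPIC routing header; the
+    # request fields named in the routing annotation are URL-encoded into a
+    # single metadata entry so the backend can route the call by resource.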
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "topic=topic_value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_topic_field_headers_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = pubsub.DeleteTopicRequest()
+
+    request.topic = "topic_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_topic), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_topic(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "topic=topic_value",
+    ) in kw["metadata"]
+
+
+def test_delete_topic_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_topic(
+            topic="topic_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+def test_delete_topic_flattened_error():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_topic(
+            pubsub.DeleteTopicRequest(),
+            topic="topic_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_topic_flattened_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.delete_topic), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_topic(
+            topic="topic_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_topic_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
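+    # The check happens client-side: the ValueError is raised before any
+    # transport call is attempted, so no mock is needed here.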
+ with pytest.raises(ValueError): + await client.delete_topic( + pubsub.DeleteTopicRequest(), + topic="topic_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DetachSubscriptionRequest, + dict, + ], +) +def test_detach_subscription(request_type, transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.DetachSubscriptionResponse() + response = client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.DetachSubscriptionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +def test_detach_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.detach_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + +def test_detach_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_subscription + ] = mock_rpc + request = {} + client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.detach_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_detach_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.detach_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.detach_subscription + ] = mock_rpc + + request = {} + await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.detach_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_detach_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest +): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + response = await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.DetachSubscriptionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +@pytest.mark.asyncio +async def test_detach_subscription_async_from_dict(): + await test_detach_subscription_async(request_type=dict) + + +def test_detach_subscription_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DetachSubscriptionRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + call.return_value = pubsub.DetachSubscriptionResponse() + client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_detach_subscription_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DetachSubscriptionRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_create_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_topic] = mock_rpc + + request = {} + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.create_topic(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_create_topic_rest_required_fields(request_type=pubsub.Topic):
+    transport_class = transports.PublisherRestTransport
+
+    request_init = {}
+    request_init["name"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).create_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["name"] = "name_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).create_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "name" in jsonified_request
+    assert jsonified_request["name"] == "name_value"
+
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = pubsub.Topic()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "put",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.Topic.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.create_topic(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_create_topic_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.create_topic._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("name",)))
+
+
+def test_create_topic_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
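+        # The fake response below is a real proto serialized to JSON, so the
+        # REST transport can deserialize it exactly as it would a live reply.
+        # (Unlike the required-fields tests, the flattened tests run real
+        # transcoding, which is why sample_request must satisfy the http rule.)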
+ return_value = pubsub.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/topics/*}" % client.transport._host, args[1] + ) + + +def test_create_topic_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_topic( + pubsub.Topic(), + name="name_value", + ) + + +def test_update_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_topic] = mock_rpc + + request = {} + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.update_topic(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_update_topic_rest_required_fields(request_type=pubsub.UpdateTopicRequest):
+    transport_class = transports.PublisherRestTransport
+
+    request_init = {}
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).update_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = pubsub.Topic()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "patch",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.Topic.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.update_topic(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_update_topic_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.update_topic._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "topic",
+                "updateMask",
+            )
+        )
+    )
+
+
+def test_update_topic_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = pubsub.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": {"name": "projects/sample1/topics/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic.name=projects/*/topics/*}" % client.transport._host, args[1] + ) + + +def test_update_topic_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_topic( + pubsub.UpdateTopicRequest(), + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_publish_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.publish in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.publish] = mock_rpc + + request = {} + client.publish(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.publish(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_publish_rest_required_fields(request_type=pubsub.PublishRequest):
+    transport_class = transports.PublisherRestTransport
+
+    request_init = {}
+    request_init["topic"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).publish._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["topic"] = "topic_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).publish._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "topic" in jsonified_request
+    assert jsonified_request["topic"] == "topic_value"
+
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = pubsub.PublishResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode_result["body"] = pb_request
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.PublishResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.publish(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_publish_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.publish._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(())
+        & set(
+            (
+                "topic",
+                "messages",
+            )
+        )
+    )
+
+
+def test_publish_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = pubsub.PublishResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.publish(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic=projects/*/topics/*}:publish" % client.transport._host, + args[1], + ) + + +def test_publish_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.publish( + pubsub.PublishRequest(), + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + +def test_get_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_topic] = mock_rpc + + request = {} + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert mock_rpc.call_count == 1
+
+        client.get_topic(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_get_topic_rest_required_fields(request_type=pubsub.GetTopicRequest):
+    transport_class = transports.PublisherRestTransport
+
+    request_init = {}
+    request_init["topic"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["topic"] = "topic_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).get_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "topic" in jsonified_request
+    assert jsonified_request["topic"] == "topic_value"
+
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = pubsub.Topic()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.Topic.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.get_topic(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_get_topic_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.get_topic._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("topic",)))
+
+
+def test_get_topic_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+ return_value = pubsub.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + topic="topic_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic=projects/*/topics/*}" % client.transport._host, args[1] + ) + + +def test_get_topic_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_topic( + pubsub.GetTopicRequest(), + topic="topic_value", + ) + + +def test_list_topics_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_topics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_topics] = mock_rpc + + request = {} + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_topics_rest_required_fields(request_type=pubsub.ListTopicsRequest): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topics._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
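+    # At this point every required field carries a value, so anything still
+    # reported as unset should be limited to the optional paging parameters.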
+    assert not set(unset_fields) - set(
+        (
+            "page_size",
+            "page_token",
+        )
+    )
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "project" in jsonified_request
+    assert jsonified_request["project"] == "project_value"
+
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = pubsub.ListTopicsResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.ListTopicsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_topics(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_topics_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_topics._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("project",))
+    )
+
+
+def test_list_topics_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = pubsub.ListTopicsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"project": "projects/sample1"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            project="project_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = pubsub.ListTopicsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_topics(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
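+        # path_template.validate confirms the URI actually requested matches
+        # the method's http rule, i.e. that the flattened `project` argument
+        # was expanded into the {project=projects/*} path segment.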
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{project=projects/*}/topics" % client.transport._host, args[1] + ) + + +def test_list_topics_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topics( + pubsub.ListTopicsRequest(), + project="project_value", + ) + + +def test_list_topics_rest_pager(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token="abc", + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(pubsub.ListTopicsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "projects/sample1"} + + pager = client.list_topics(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Topic) for i in results) + + pages = list(client.list_topics(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_topic_subscriptions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_subscriptions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_subscriptions + ] = mock_rpc + + request = {} + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topic_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_topic_subscriptions_rest_required_fields( + request_type=pubsub.ListTopicSubscriptionsRequest, +): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_subscriptions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_subscriptions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicSubscriptionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
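+            # The "$alt" system parameter asserted further below is attached
+            # by the generated REST transport to every call: responses come
+            # back as JSON with enums encoded as integers.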
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_topic_subscriptions(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_topic_subscriptions_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_topic_subscriptions._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("topic",))
+    )
+
+
+def test_list_topic_subscriptions_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = pubsub.ListTopicSubscriptionsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"topic": "projects/sample1/topics/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            topic="topic_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_topic_subscriptions(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{topic=projects/*/topics/*}/subscriptions" % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_topic_subscriptions_rest_flattened_error(transport: str = "rest"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_topic_subscriptions(
+            pubsub.ListTopicSubscriptionsRequest(),
+            topic="topic_value",
+        )
+
+
+def test_list_topic_subscriptions_rest_pager(transport: str = "rest"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + pubsub.ListTopicSubscriptionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"topic": "projects/sample1/topics/sample2"} + + pager = client.list_topic_subscriptions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + pages = list(client.list_topic_subscriptions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_topic_snapshots_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_snapshots in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_snapshots + ] = mock_rpc + + request = {} + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topic_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_topic_snapshots_rest_required_fields( + request_type=pubsub.ListTopicSnapshotsRequest, +): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_snapshots._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_snapshots._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicSnapshotsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "get",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.list_topic_snapshots(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_list_topic_snapshots_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.list_topic_snapshots._get_unset_required_fields({})
+    assert set(unset_fields) == (
+        set(
+            (
+                "pageSize",
+                "pageToken",
+            )
+        )
+        & set(("topic",))
+    )
+
+
+def test_list_topic_snapshots_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
+        return_value = pubsub.ListTopicSnapshotsResponse()
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"topic": "projects/sample1/topics/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            topic="topic_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        # Convert return value to protobuf type
+        return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value)
+        json_return_value = json_format.MessageToJson(return_value)
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.list_topic_snapshots(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{topic=projects/*/topics/*}/snapshots" % client.transport._host,
+            args[1],
+        )
+
+
+def test_list_topic_snapshots_rest_flattened_error(transport: str = "rest"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_topic_snapshots(
+            pubsub.ListTopicSnapshotsRequest(),
+            topic="topic_value",
+        )
+
+
+def test_list_topic_snapshots_rest_pager(transport: str = "rest"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # TODO(kbandes): remove this mock unless there's a good reason for it.
+        # with mock.patch.object(path_template, 'transcode') as transcode:
+        # Set the response as a series of pages
+        response = (
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[
+                    str(),
+                    str(),
+                    str(),
+                ],
+                next_page_token="abc",
+            ),
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[],
+                next_page_token="def",
+            ),
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[
+                    str(),
+                ],
+                next_page_token="ghi",
+            ),
+            pubsub.ListTopicSnapshotsResponse(
+                snapshots=[
+                    str(),
+                    str(),
+                ],
+            ),
+        )
+        # Two responses for two calls
+        response = response + response
+
+        # Wrap the values into proper Response objs
+        response = tuple(pubsub.ListTopicSnapshotsResponse.to_json(x) for x in response)
+        return_values = tuple(Response() for i in response)
+        for return_val, response_val in zip(return_values, response):
+            return_val._content = response_val.encode("UTF-8")
+            return_val.status_code = 200
+        req.side_effect = return_values
+
+        sample_request = {"topic": "projects/sample1/topics/sample2"}
+
+        pager = client.list_topic_snapshots(request=sample_request)
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, str) for i in results)
+
+        pages = list(client.list_topic_snapshots(request=sample_request).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+def test_delete_topic_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = PublisherClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert client._transport.delete_topic in client._transport._wrapped_methods
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+        )
+        client._transport._wrapped_methods[client._transport.delete_topic] = mock_rpc
+
+        request = {}
+        client.delete_topic(request)
+
+        # Establish that the underlying wrapped RPC was called.
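+        # (the second invocation below must hit this same mock; wrap_method is
+        # never consulted again once the cache is populated)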
+        assert mock_rpc.call_count == 1
+
+        client.delete_topic(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_delete_topic_rest_required_fields(request_type=pubsub.DeleteTopicRequest):
+    transport_class = transports.PublisherRestTransport
+
+    request_init = {}
+    request_init["topic"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["topic"] = "topic_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).delete_topic._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "topic" in jsonified_request
+    assert jsonified_request["topic"] == "topic_value"
+
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = None
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "delete",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+            json_return_value = ""
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.delete_topic(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_delete_topic_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.delete_topic._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("topic",)))
+
+
+def test_delete_topic_rest_flattened():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(type(client.transport._session), "request") as req:
+        # Designate an appropriate value for the returned response.
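+        # DeleteTopic maps to google.protobuf.Empty, hence the None payload below.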
+        return_value = None
+
+        # get arguments that satisfy an http rule for this method
+        sample_request = {"topic": "projects/sample1/topics/sample2"}
+
+        # get truthy value for each flattened field
+        mock_args = dict(
+            topic="topic_value",
+        )
+        mock_args.update(sample_request)
+
+        # Wrap the value into a proper Response obj
+        response_value = Response()
+        response_value.status_code = 200
+        json_return_value = ""
+        response_value._content = json_return_value.encode("UTF-8")
+        req.return_value = response_value
+        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+        client.delete_topic(**mock_args)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(req.mock_calls) == 1
+        _, args, _ = req.mock_calls[0]
+        assert path_template.validate(
+            "%s/v1/{topic=projects/*/topics/*}" % client.transport._host, args[1]
+        )
+
+
+def test_delete_topic_rest_flattened_error(transport: str = "rest"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_topic(
+            pubsub.DeleteTopicRequest(),
+            topic="topic_value",
+        )
+
+
+def test_detach_subscription_rest_use_cached_wrapped_rpc():
+    # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+    # instead of constructing them on each call
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+        client = PublisherClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport="rest",
+        )
+
+        # Should wrap all calls on client creation
+        assert wrapper_fn.call_count > 0
+        wrapper_fn.reset_mock()
+
+        # Ensure method has been cached
+        assert (
+            client._transport.detach_subscription in client._transport._wrapped_methods
+        )
+
+        # Replace cached wrapped function with mock
+        mock_rpc = mock.Mock()
+        mock_rpc.return_value.name = (
+            "foo"  # operation_request.operation in compute client(s) expects a string.
+        )
+        client._transport._wrapped_methods[
+            client._transport.detach_subscription
+        ] = mock_rpc
+
+        request = {}
+        client.detach_subscription(request)
+
+        # Establish that the underlying wrapped RPC was called.
+        assert mock_rpc.call_count == 1
+
+        client.detach_subscription(request)
+
+        # Establish that a new wrapper was not created for this call
+        assert wrapper_fn.call_count == 0
+        assert mock_rpc.call_count == 2
+
+
+def test_detach_subscription_rest_required_fields(
+    request_type=pubsub.DetachSubscriptionRequest,
+):
+    transport_class = transports.PublisherRestTransport
+
+    request_init = {}
+    request_init["subscription"] = ""
+    request = request_type(**request_init)
+    pb_request = request_type.pb(request)
+    jsonified_request = json.loads(
+        json_format.MessageToJson(pb_request, use_integers_for_enums=False)
+    )
+
+    # verify fields with default values are dropped
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).detach_subscription._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with default values are now present
+
+    jsonified_request["subscription"] = "subscription_value"
+
+    unset_fields = transport_class(
+        credentials=ga_credentials.AnonymousCredentials()
+    ).detach_subscription._get_unset_required_fields(jsonified_request)
+    jsonified_request.update(unset_fields)
+
+    # verify required fields with non-default values are left alone
+    assert "subscription" in jsonified_request
+    assert jsonified_request["subscription"] == "subscription_value"
+
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="rest",
+    )
+    request = request_type(**request_init)
+
+    # Designate an appropriate value for the returned response.
+    return_value = pubsub.DetachSubscriptionResponse()
+    # Mock the http request call within the method and fake a response.
+    with mock.patch.object(Session, "request") as req:
+        # We need to mock transcode() because providing default values
+        # for required fields will fail the real version if the http_options
+        # expect actual values for those fields.
+        with mock.patch.object(path_template, "transcode") as transcode:
+            # A uri without fields and an empty body will force all the
+            # request fields to show up in the query_params.
+            pb_request = request_type.pb(request)
+            transcode_result = {
+                "uri": "v1/sample_method",
+                "method": "post",
+                "query_params": pb_request,
+            }
+            transcode.return_value = transcode_result
+
+            response_value = Response()
+            response_value.status_code = 200
+
+            # Convert return value to protobuf type
+            return_value = pubsub.DetachSubscriptionResponse.pb(return_value)
+            json_return_value = json_format.MessageToJson(return_value)
+
+            response_value._content = json_return_value.encode("UTF-8")
+            req.return_value = response_value
+            req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+            response = client.detach_subscription(request)
+
+            expected_params = [("$alt", "json;enum-encoding=int")]
+            actual_params = req.call_args.kwargs["params"]
+            assert expected_params == actual_params
+
+
+def test_detach_subscription_rest_unset_required_fields():
+    transport = transports.PublisherRestTransport(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    unset_fields = transport.detach_subscription._get_unset_required_fields({})
+    assert set(unset_fields) == (set(()) & set(("subscription",)))
+
+
+def test_credentials_transport_error():
+    # It is an error to provide credentials and a transport instance.
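+    # A transport instance already carries its own credentials, so supplying a
+    # second source of credentials is ambiguous and must be rejected up front.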
+ transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PublisherClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PublisherGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + transports.PublisherRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = PublisherClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
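+    # Patching __call__ on the multicallable's type intercepts the stub invocation itself.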
+ with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.create_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Topic() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.update_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_publish_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + call.return_value = pubsub.PublishResponse() + client.publish(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PublishRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.get_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topics_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value = pubsub.ListTopicsResponse() + client.list_topics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topic_subscriptions_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSubscriptionsResponse() + client.list_topic_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topic_snapshots_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSnapshotsResponse() + client.list_topic_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value = None + client.delete_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_detach_subscription_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + call.return_value = pubsub.DetachSubscriptionResponse() + client.detach_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DetachSubscriptionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = PublisherAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. 
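+        # FakeUnaryUnaryCall wraps the message so the mocked call can be awaited.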
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + await client.create_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Topic() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + await client.update_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_publish_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) + ) + await client.publish(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PublishRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + await client.get_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_topics_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_topics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_topic_subscriptions_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + ) + await client.list_topic_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_topic_snapshots_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + ) + await client.list_topic_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_topic(request=None) + + # Establish that the underlying stub method was called. 
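+        # Each mock_calls entry unpacks to (name, args, kwargs); args[0] is the request proto.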
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_detach_subscription_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + await client.detach_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DetachSubscriptionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = PublisherClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_topic_rest_bad_request(request_type=pubsub.Topic): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Topic, + dict, + ], +) +def test_create_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_topic(request) + + # Establish that the response is the type that we expect. 
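+    # The faked JSON body should round-trip back into a proto-plus Topic.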
+ assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_create_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_create_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_create_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.Topic.pb(pubsub.Topic()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Topic.to_json(pubsub.Topic()) + req.return_value.content = return_value + + request = pubsub.Topic() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + post_with_metadata.return_value = pubsub.Topic(), metadata + + client.create_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_topic_rest_bad_request(request_type=pubsub.UpdateTopicRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateTopicRequest, + dict, + ], +) +def test_update_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
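+        # Populate every scalar field so the assertions below exercise the full
+        # JSON round trip rather than just the defaults.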
+ return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_topic(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_update_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_update_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_update_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.UpdateTopicRequest.pb(pubsub.UpdateTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Topic.to_json(pubsub.Topic()) + req.return_value.content = return_value + + request = pubsub.UpdateTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + post_with_metadata.return_value = pubsub.Topic(), metadata + + client.update_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_publish_rest_bad_request(request_type=pubsub.PublishRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
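+    # api_core translates a 400 status into core_exceptions.BadRequest.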
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.publish(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PublishRequest, + dict, + ], +) +def test_publish_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.publish(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + assert response.message_ids == ["message_ids_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_publish_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_publish" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_publish_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_publish" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.PublishRequest.pb(pubsub.PublishRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.PublishResponse.to_json(pubsub.PublishResponse()) + req.return_value.content = return_value + + request = pubsub.PublishRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.PublishResponse() + post_with_metadata.return_value = pubsub.PublishResponse(), metadata + + client.publish( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_topic_rest_bad_request(request_type=pubsub.GetTopicRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetTopicRequest, + dict, + ], +) +def test_get_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_topic(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_get_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_get_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_get_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.GetTopicRequest.pb(pubsub.GetTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Topic.to_json(pubsub.Topic()) + req.return_value.content = return_value + + request = pubsub.GetTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + post_with_metadata.return_value = pubsub.Topic(), metadata + + client.get_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_topics_rest_bad_request(request_type=pubsub.ListTopicsRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_topics(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicsRequest, + dict, + ], +) +def test_list_topics_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
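+        # Only next_page_token is populated; this test checks the pager wrapper
+        # without iterating any pages.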
+ return_value = pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_topics(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topics_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topics" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topics_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.ListTopicsRequest.pb(pubsub.ListTopicsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.ListTopicsResponse.to_json(pubsub.ListTopicsResponse()) + req.return_value.content = return_value + + request = pubsub.ListTopicsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicsResponse() + post_with_metadata.return_value = pubsub.ListTopicsResponse(), metadata + + client.list_topics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_topic_subscriptions_rest_bad_request( + request_type=pubsub.ListTopicSubscriptionsRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_topic_subscriptions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSubscriptionsRequest, + dict, + ], +) +def test_list_topic_subscriptions_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_topic_subscriptions(request) + + # Establish that the response is the type that we expect. 
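+    # Attribute access on the pager delegates to the underlying response message.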
+ assert isinstance(response, pagers.ListTopicSubscriptionsPager) + assert response.subscriptions == ["subscriptions_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topic_subscriptions_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_subscriptions" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, + "post_list_topic_subscriptions_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topic_subscriptions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.ListTopicSubscriptionsRequest.pb( + pubsub.ListTopicSubscriptionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.ListTopicSubscriptionsResponse.to_json( + pubsub.ListTopicSubscriptionsResponse() + ) + req.return_value.content = return_value + + request = pubsub.ListTopicSubscriptionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicSubscriptionsResponse() + post_with_metadata.return_value = ( + pubsub.ListTopicSubscriptionsResponse(), + metadata, + ) + + client.list_topic_subscriptions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_topic_snapshots_rest_bad_request( + request_type=pubsub.ListTopicSnapshotsRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_topic_snapshots(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSnapshotsRequest, + dict, + ], +) +def test_list_topic_snapshots_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_topic_snapshots(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTopicSnapshotsPager) + assert response.snapshots == ["snapshots_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topic_snapshots_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_snapshots" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_snapshots_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topic_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.ListTopicSnapshotsRequest.pb( + pubsub.ListTopicSnapshotsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.ListTopicSnapshotsResponse.to_json( + pubsub.ListTopicSnapshotsResponse() + ) + req.return_value.content = return_value + + request = pubsub.ListTopicSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicSnapshotsResponse() + post_with_metadata.return_value = pubsub.ListTopicSnapshotsResponse(), metadata + + client.list_topic_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_topic_rest_bad_request(request_type=pubsub.DeleteTopicRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteTopicRequest, + dict, + ], +) +def test_delete_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
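+    # DeleteTopic has an empty response body, so the mock returns an empty
+    # payload and the client is expected to return None.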
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_topic(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "pre_delete_topic" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteTopicRequest.pb(pubsub.DeleteTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = pubsub.DeleteTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_detach_subscription_rest_bad_request( + request_type=pubsub.DetachSubscriptionRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.detach_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DetachSubscriptionRequest, + dict, + ], +) +def test_detach_subscription_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
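+        # DetachSubscriptionResponse carries no fields, so only the response
+        # type can be verified after the REST round trip.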
+ return_value = pubsub.DetachSubscriptionResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.DetachSubscriptionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.detach_subscription(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_subscription_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_detach_subscription" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_detach_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.PublisherRestInterceptor, "pre_detach_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.DetachSubscriptionRequest.pb( + pubsub.DetachSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.DetachSubscriptionResponse.to_json( + pubsub.DetachSubscriptionResponse() + ) + req.return_value.content = return_value + + request = pubsub.DetachSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.DetachSubscriptionResponse() + post_with_metadata.return_value = pubsub.DetachSubscriptionResponse(), metadata + + client.detach_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
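+    # The IAM methods accept the raw iam_policy_pb2 / policy_pb2 messages
+    # rather than pubsub_v1 types, so the request above is built with
+    # json_format.ParseDict.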
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_initialize_client_w_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + client.create_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Topic() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + client.update_topic(request=None) + + # Establish that the underlying stub method was called. 
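+        # With request=None the client builds a default UpdateTopicRequest,
+        # which is exactly what the stub should have received.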
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_publish_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + client.publish(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PublishRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + client.get_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topics_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + client.list_topics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topic_subscriptions_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + client.list_topic_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topic_snapshots_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + client.list_topic_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + client.delete_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_detach_subscription_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + client.detach_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DetachSubscriptionRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PublisherGrpcTransport, + ) + + +def test_publisher_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PublisherTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_publisher_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PublisherTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
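+    # The base transport defines the full method surface without implementing
+    # it; concrete transports must override each of these methods.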
+ methods = ( + "create_topic", + "update_topic", + "publish", + "get_topic", + "list_topics", + "list_topic_subscriptions", + "list_topic_snapshots", + "delete_topic", + "detach_subscription", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_publisher_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_publisher_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport() + adc.assert_called_once() + + +def test_publisher_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PublisherClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + ], +) +def test_publisher_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
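+    # google.auth.default is patched, so no real ADC lookup happens; only the
+    # scopes and quota project forwarded to it are verified.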
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + transports.PublisherRestTransport, + ], +) +def test_publisher_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PublisherGrpcTransport, grpc_helpers), + (transports.PublisherGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_publisher_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], +) +def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
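+    # grpc.ssl_channel_credentials should be invoked with the certificate/key
+    # pair produced by the client_cert_source callback.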
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_publisher_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PublisherRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_publisher_host_no_port(transport_name): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_publisher_host_with_port(transport_name): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "pubsub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_publisher_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PublisherClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PublisherClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_topic._session + session2 = client2.transport.create_topic._session + assert session1 != session2 + session1 = client1.transport.update_topic._session + session2 = client2.transport.update_topic._session + assert session1 != session2 + session1 = client1.transport.publish._session + session2 = client2.transport.publish._session + assert session1 != session2 + session1 = client1.transport.get_topic._session + session2 = client2.transport.get_topic._session + assert session1 != session2 + session1 = client1.transport.list_topics._session + session2 = client2.transport.list_topics._session + assert session1 != session2 + session1 = client1.transport.list_topic_subscriptions._session + session2 = client2.transport.list_topic_subscriptions._session + assert session1 != session2 + session1 = client1.transport.list_topic_snapshots._session + session2 = client2.transport.list_topic_snapshots._session + assert session1 != session2 + session1 = client1.transport.delete_topic._session + session2 = client2.transport.delete_topic._session + assert session1 != session2 + session1 = client1.transport.detach_subscription._session + session2 = client2.transport.detach_subscription._session + 
assert session1 != session2
+
+
+def test_publisher_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.PublisherGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_publisher_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.PublisherGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.filterwarnings("ignore::FutureWarning")
+@pytest.mark.parametrize(
+    "transport_class",
+    [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport],
+)
+def test_publisher_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                    ("grpc.max_metadata_size", 4 * 1024 * 1024),
+                    ("grpc.keepalive_time_ms", 30000),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
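+# This variant exercises api_mtls_endpoint with SSL credentials derived from
+# ADC (google.auth.transport.grpc.SslCredentials) rather than an explicit
+# client_cert_source.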
+@pytest.mark.parametrize( + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], +) +def test_publisher_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_crypto_key_path(): + project = "squid" + location = "clam" + key_ring = "whelk" + crypto_key = "octopus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = PublisherClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = PublisherClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_schema_path(): + project = "winkle" + schema = "nautilus" + expected = "projects/{project}/schemas/{schema}".format( + project=project, + schema=schema, + ) + actual = PublisherClient.schema_path(project, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "scallop", + "schema": "abalone", + } + path = PublisherClient.schema_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_schema_path(path) + assert expected == actual + + +def test_snapshot_path(): + project = "squid" + snapshot = "clam" + expected = "projects/{project}/snapshots/{snapshot}".format( + project=project, + snapshot=snapshot, + ) + actual = PublisherClient.snapshot_path(project, snapshot) + assert expected == actual + + +def test_parse_snapshot_path(): + expected = { + "project": "whelk", + "snapshot": "octopus", + } + path = PublisherClient.snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_snapshot_path(path) + assert expected == actual + + +def test_subscription_path(): + project = "oyster" + subscription = "nudibranch" + expected = "projects/{project}/subscriptions/{subscription}".format( + project=project, + subscription=subscription, + ) + actual = PublisherClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "cuttlefish", + "subscription": "mussel", + } + path = PublisherClient.subscription_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_subscription_path(path) + assert expected == actual + + +def test_topic_path(): + project = "winkle" + topic = "nautilus" + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + actual = PublisherClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "scallop", + "topic": "abalone", + } + path = PublisherClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PublisherClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = PublisherClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = PublisherClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = PublisherClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = PublisherClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = PublisherClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = PublisherClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = PublisherClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+    actual = PublisherClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+    expected = "projects/{project}/locations/{location}".format(
+        project=project,
+        location=location,
+    )
+    actual = PublisherClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = PublisherClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PublisherClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_default_client_info():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.PublisherTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = PublisherClient(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.PublisherTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = PublisherClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = PublisherAsyncClient(
+        credentials=async_anonymous_credentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PublisherClient, transports.PublisherGrpcTransport), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/tests/unit/gapic/pubsub_v1/test_schema_service.py b/tests/unit/gapic/pubsub_v1/test_schema_service.py new file mode 100644 index 000000000..f71b66805 --- /dev/null +++ b/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -0,0 +1,10478 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
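Before the SchemaService file begins, it is worth noting the single contract that the `test_transport_close_*` and `test_client_ctx` cases above verify: the client is a context manager, entering it must not close the transport, and exiting it must close the transport exactly once. A self-contained sketch of that contract with stand-in names:

```python
from unittest import mock

class FakeClient:
    # Stand-in for PublisherClient; only the context-manager protocol matters.
    def __init__(self, transport):
        self.transport = transport

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.transport.close()

transport = mock.Mock()
with FakeClient(transport):
    transport.close.assert_not_called()
transport.close.assert_called_once()
```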
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.schema_service import SchemaServiceAsyncClient +from google.pubsub_v1.services.schema_service import SchemaServiceClient +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.services.schema_service import transports +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +import google.auth + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SchemaServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +def test__read_environment_variables(): + assert SchemaServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SchemaServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + SchemaServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SchemaServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. 
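The assertions in `test__get_default_mtls_endpoint` above fully determine the rewrite rule: insert an `mtls` label after the service name, but only for `*.googleapis.com` and `*.sandbox.googleapis.com` hosts, and leave already-mTLS or non-Google endpoints untouched. A small model that satisfies all six assertions; the real client has its own implementation, so treat this as an assumed equivalent:

```python
def get_default_mtls_endpoint(api_endpoint):
    # None and empty endpoints pass through unchanged.
    if not api_endpoint:
        return api_endpoint
    name, _, domain = api_endpoint.partition(".")
    # Only Google API domains are rewritten; "mtls."-prefixed domains
    # fall through here, which makes the transform idempotent.
    if domain not in ("googleapis.com", "sandbox.googleapis.com"):
        return api_endpoint
    return f"{name}.mtls.{domain}"

assert get_default_mtls_endpoint("example.googleapis.com") == "example.mtls.googleapis.com"
assert get_default_mtls_endpoint("api.example.com") == "api.example.com"
```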
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
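Cases 3 through 10 above pin down the fallback behaviour when `should_use_client_cert` is unavailable: `GOOGLE_API_USE_CLIENT_CERTIFICATE` is read case-insensitively, defaults to false when unset, and any other value raises `ValueError`. A sketch of that parsing, using names not present in the generated code:

```python
import os

def use_client_cert_from_env() -> bool:
    raw = (os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE") or "false").lower()
    if raw not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
            "must be either `true` or `false`"
        )
    return raw == "true"
```

Cases 5 through 8 only pass because the comparison is case-insensitive, hence the `lower()` before validation.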
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + SchemaServiceClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert SchemaServiceClient._use_client_cert_effective() is False + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SchemaServiceClient._get_client_cert_source(None, False) is None + assert ( + SchemaServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + SchemaServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SchemaServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SchemaServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SchemaServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SchemaServiceClient._DEFAULT_UNIVERSE + default_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SchemaServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + SchemaServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) 
+ == SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, default_universe, "always") + == SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SchemaServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SchemaServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SchemaServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SchemaServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SchemaServiceClient._get_universe_domain(None, None) + == SchemaServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SchemaServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), + ], +) +def test_schema_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert 
isinstance(client, client_class) + + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SchemaServiceGrpcTransport, "grpc"), + (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SchemaServiceRestTransport, "rest"), + ], +) +def test_schema_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), + ], +) +def test_schema_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +def test_schema_service_client_get_transport_class(): + transport = SchemaServiceClient.get_transport_class() + available_transports = [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceRestTransport, + ] + assert transport in available_transports + + transport = SchemaServiceClient.get_transport_class("grpc") + assert transport == transports.SchemaServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + SchemaServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), +) +def test_schema_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
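The `client.transport._host` checks in the service-account tests above encode a transport-specific host convention: gRPC transports carry a `host:port` pair, while the REST transport carries a URL with a scheme. Extracted as a helper for readability (illustrative only):

```python
def expected_host(transport_name: str) -> str:
    if transport_name in ("grpc", "grpc_asyncio"):
        return "pubsub.googleapis.com:443"
    return "https://pubsub.googleapis.com"

assert expected_host("grpc") == "pubsub.googleapis.com:443"
assert expected_host("rest") == "https://pubsub.googleapis.com"
```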
+ with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "true"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "false"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "true"), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + SchemaServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_schema_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
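The `never`/`always`/`Unsupported` branches above, together with the `auto` cases that follow, amount to a small decision table for endpoint selection. A compact model of it; the generated client implements this internally, so the function below is an assumption for illustration:

```python
from google.auth.exceptions import MutualTLSChannelError

def select_endpoint(use_mtls_env, cert_available, default_endpoint, mtls_endpoint):
    if use_mtls_env == "never":
        return default_endpoint
    if use_mtls_env == "always":
        return mtls_endpoint
    if use_mtls_env == "auto":
        # "auto" upgrades to mTLS only when a client certificate exists.
        return mtls_endpoint if cert_available else default_endpoint
    raise MutualTLSChannelError(
        "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be "
        "`never`, `auto` or `always`"
    )
```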
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
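Both test-case tables above express the same gate: when `GOOGLE_API_USE_CLIENT_CERTIFICATE` is unset, mTLS is enabled only if the JSON file named by `GOOGLE_API_CERTIFICATE_CONFIG` contains a `workload` entry under `cert_configs`. A sketch of such a reader, exercised the same way the tests mock `open`; the function itself is an assumption:

```python
import json
from unittest import mock

def workload_cert_configured(path: str) -> bool:
    with open(path) as f:
        return "workload" in json.load(f).get("cert_configs", {})

config = {"version": 1, "cert_configs": {"workload": {"cert_path": "c", "key_path": "k"}}}
with mock.patch("builtins.open", mock.mock_open(read_data=json.dumps(config))):
    assert workload_cert_configured("mock_certificate_config.json") is True
```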
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] +) +@mock.patch.object( + SchemaServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), +) +def test_schema_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SchemaServiceClient._DEFAULT_UNIVERSE + default_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
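The universe-domain cases below reduce to plain string formatting of the endpoint template. Assuming the Pub/Sub template shape (the tests swap it for `test.{UNIVERSE_DOMAIN}` via `modify_default_endpoint_template`):

```python
DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}"  # assumed shape

assert DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="googleapis.com") == "pubsub.googleapis.com"
assert DEFAULT_ENDPOINT_TEMPLATE.format(UNIVERSE_DOMAIN="bar.com") == "pubsub.bar.com"
```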
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), + ], +) +def test_schema_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", None), + ], +) +def test_schema_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_schema_service_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SchemaServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_schema_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
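The `create_channel` assertion that follows pins a fixed set of gRPC channel options. Pulled out here with a note on what each knob does; the values are copied verbatim from the assertion:

```python
GRPC_CHANNEL_OPTIONS = [
    ("grpc.max_send_message_length", -1),         # -1 removes the outbound size cap
    ("grpc.max_receive_message_length", -1),      # -1 removes the inbound size cap
    ("grpc.max_metadata_size", 4 * 1024 * 1024),  # allow up to 4 MiB of metadata
    ("grpc.keepalive_time_ms", 30000),            # keepalive ping every 30 seconds
]
```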
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CreateSchemaRequest, + dict, + ], +) +def test_create_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + response = client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gp_schema.CreateSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_create_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gp_schema.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + +def test_create_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_schema] = mock_rpc + request = {} + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_schema + ] = mock_rpc + + request = {} + await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_schema_async( + transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
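The two `use_cached_wrapped_rpc` tests above verify one optimization: every transport method is wrapped once, at client construction, and each call looks the wrapper up in `_wrapped_methods` instead of re-wrapping. A minimal model with assumed stand-in names:

```python
class FakeTransport:
    def create_schema(self, request):
        return "schema"

def wrap_method(func):
    # In the real client this layers on retries, timeouts, and metadata.
    def wrapped(request):
        return func(request)
    return wrapped

transport = FakeTransport()
wrapped_methods = {transport.create_schema: wrap_method(transport.create_schema)}

# Bound methods hash by (function, instance), so lookups hit the cache
# and wrap_method is never re-invoked per call.
assert transport.create_schema in wrapped_methods
assert wrapped_methods[transport.create_schema]({}) == "schema"
```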
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gp_schema.CreateSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_create_schema_async_from_dict(): + await test_create_schema_async(request_type=dict) + + +def test_create_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CreateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CreateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_schema( + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].schema
+        mock_val = gp_schema.Schema(name="name_value")
+        assert arg == mock_val
+        arg = args[0].schema_id
+        mock_val = "schema_id_value"
+        assert arg == mock_val
+
+
+def test_create_schema_flattened_error():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_schema(
+            gp_schema.CreateSchemaRequest(),
+            parent="parent_value",
+            schema=gp_schema.Schema(name="name_value"),
+            schema_id="schema_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_schema_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.create_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_schema(
+            parent="parent_value",
+            schema=gp_schema.Schema(name="name_value"),
+            schema_id="schema_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].schema
+        mock_val = gp_schema.Schema(name="name_value")
+        assert arg == mock_val
+        arg = args[0].schema_id
+        mock_val = "schema_id_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_schema_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_schema(
+            gp_schema.CreateSchemaRequest(),
+            parent="parent_value",
+            schema=gp_schema.Schema(name="name_value"),
+            schema_id="schema_id_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        schema.GetSchemaRequest,
+        dict,
+    ],
+)
+def test_get_schema(request_type, transport: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = schema.Schema(
+            name="name_value",
+            type_=schema.Schema.Type.PROTOCOL_BUFFER,
+            definition="definition_value",
+            revision_id="revision_id_value",
+        )
+        response = client.get_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = schema.GetSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_get_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = schema.GetSchemaRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.GetSchemaRequest( + name="name_value", + ) + + +def test_get_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_schema] = mock_rpc + request = {} + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_schema + ] = mock_rpc + + request = {} + await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. 
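+        # The wrapped RPC is cached in _wrapped_methods when the client is
+        # constructed, so invoking the method twice must not trigger
+        # wrap_method again.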
+ assert mock_rpc.call_count == 1 + + await client.get_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_schema_async( + transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = schema.GetSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_get_schema_async_from_dict(): + await test_get_schema_async(request_type=dict) + + +def test_get_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.GetSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = schema.Schema() + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.GetSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
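+    # The resource name from the request is propagated as the
+    # x-goog-request-params gRPC metadata entry, which the backend uses to
+    # route the request.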
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]
+
+
+def test_get_schema_flattened():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = schema.Schema()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_schema(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_get_schema_flattened_error():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_schema(
+            schema.GetSchemaRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_schema_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_schema(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_schema_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_schema(
+            schema.GetSchemaRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        schema.ListSchemasRequest,
+        dict,
+    ],
+)
+def test_list_schemas(request_type, transport: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = schema.ListSchemasResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_schemas(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = schema.ListSchemasRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
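+    # The transport returns a raw ListSchemasResponse; the client wraps it in
+    # a ListSchemasPager, which lazily fetches any subsequent pages during
+    # iteration.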
+ assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schemas_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = schema.ListSchemasRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_schemas(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemasRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_schemas_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_schemas in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_schemas] = mock_rpc + request = {} + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_schemas(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_schemas_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_schemas + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_schemas + ] = mock_rpc + + request = {} + await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_schemas(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_schemas_async( + transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = schema.ListSchemasRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_schemas_async_from_dict(): + await test_list_schemas_async(request_type=dict) + + +def test_list_schemas_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = schema.ListSchemasResponse() + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_schemas_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemasRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse() + ) + await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_schemas_flattened():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = schema.ListSchemasResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_schemas(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_schemas_flattened_error():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_schemas(
+            schema.ListSchemasRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_schemas_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            schema.ListSchemasResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_schemas(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_schemas_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_schemas(
+            schema.ListSchemasRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_schemas_pager(transport_name: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_schemas), "__call__") as call:
+        # Set the response to a series of pages.
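+        # Each call to the mocked stub returns the next response from
+        # side_effect; the trailing RuntimeError fails the test if the pager
+        # requests more pages than the fixture provides.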
+ call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_schemas(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + +def test_list_schemas_pages(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = list(client.list_schemas(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_schemas_async_pager(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_schemas( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, schema.Schema) for i in responses) + + +@pytest.mark.asyncio +async def test_list_schemas_async_pages(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
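+        # new_callable=mock.AsyncMock makes the patched stub awaitable; each
+        # side_effect value is returned from one awaited page fetch.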
+ call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_schemas(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemaRevisionsRequest, + dict, + ], +) +def test_list_schema_revisions(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = schema.ListSchemaRevisionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemaRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schema_revisions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = schema.ListSchemaRevisionsRequest( + name="name_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_schema_revisions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemaRevisionsRequest( + name="name_value", + page_token="page_token_value", + ) + + +def test_list_schema_revisions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_schema_revisions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_schema_revisions + ] = mock_rpc + request = {} + client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_schema_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_schema_revisions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_schema_revisions + ] = mock_rpc + + request = {} + await client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_schema_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async( + transport: str = "grpc_asyncio", request_type=schema.ListSchemaRevisionsRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = schema.ListSchemaRevisionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemaRevisionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async_from_dict(): + await test_list_schema_revisions_async(request_type=dict) + + +def test_list_schema_revisions_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemaRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + call.return_value = schema.ListSchemaRevisionsResponse() + client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_schema_revisions_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemaRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse() + ) + await client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_list_schema_revisions_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemaRevisionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_schema_revisions( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_list_schema_revisions_flattened_error():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_schema_revisions(
+            schema.ListSchemaRevisionsRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_schema_revisions_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_schema_revisions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            schema.ListSchemaRevisionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_schema_revisions(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_schema_revisions_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_schema_revisions(
+            schema.ListSchemaRevisionsRequest(),
+            name="name_value",
+        )
+
+
+def test_list_schema_revisions_pager(transport_name: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_schema_revisions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            schema.ListSchemaRevisionsResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+                next_page_token="abc",
+            ),
+            schema.ListSchemaRevisionsResponse(
+                schemas=[],
+                next_page_token="def",
+            ),
+            schema.ListSchemaRevisionsResponse(
+                schemas=[
+                    schema.Schema(),
+                ],
+                next_page_token="ghi",
+            ),
+            schema.ListSchemaRevisionsResponse(
+                schemas=[
+                    schema.Schema(),
+                    schema.Schema(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        expected_metadata = ()
+        retry = retries.Retry()
+        timeout = 5
+        expected_metadata = tuple(expected_metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", ""),)),
+        )
+        pager = client.list_schema_revisions(request={}, retry=retry, timeout=timeout)
+
+        assert pager._metadata == expected_metadata
+        assert pager._retry == retry
+        assert pager._timeout == timeout
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, schema.Schema) for i in results)
+
+
+def test_list_schema_revisions_pages(transport_name: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
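+    # __call__ is patched on the type of the transport stub because Python
+    # looks up special methods on the class, not the instance.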
+ with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = list(client.list_schema_revisions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async_pager(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_schema_revisions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, schema.Schema) for i in responses) + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async_pages(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_schema_revisions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CommitSchemaRequest, + dict, + ], +) +def test_commit_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + response = client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gp_schema.CommitSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_commit_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gp_schema.CommitSchemaRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.commit_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CommitSchemaRequest( + name="name_value", + ) + + +def test_commit_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.commit_schema] = mock_rpc + request = {} + client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.commit_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_commit_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.commit_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.commit_schema + ] = mock_rpc + + request = {} + await client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.commit_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_commit_schema_async( + transport: str = "grpc_asyncio", request_type=gp_schema.CommitSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gp_schema.CommitSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_commit_schema_async_from_dict(): + await test_commit_schema_async(request_type=dict) + + +def test_commit_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CommitSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_commit_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CommitSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + await client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_commit_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.commit_schema( + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].schema
+        mock_val = gp_schema.Schema(name="name_value")
+        assert arg == mock_val
+
+
+def test_commit_schema_flattened_error():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.commit_schema(
+            gp_schema.CommitSchemaRequest(),
+            name="name_value",
+            schema=gp_schema.Schema(name="name_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_commit_schema_flattened_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.commit_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.commit_schema(
+            name="name_value",
+            schema=gp_schema.Schema(name="name_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].schema
+        mock_val = gp_schema.Schema(name="name_value")
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_commit_schema_flattened_error_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.commit_schema(
+            gp_schema.CommitSchemaRequest(),
+            name="name_value",
+            schema=gp_schema.Schema(name="name_value"),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        schema.RollbackSchemaRequest,
+        dict,
+    ],
+)
+def test_rollback_schema(request_type, transport: str = "grpc"):
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = schema.Schema(
+            name="name_value",
+            type_=schema.Schema.Type.PROTOCOL_BUFFER,
+            definition="definition_value",
+            revision_id="revision_id_value",
+        )
+        response = client.rollback_schema(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = schema.RollbackSchemaRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, schema.Schema)
+ assert response.name == "name_value"
+ assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER
+ assert response.definition == "definition_value"
+ assert response.revision_id == "revision_id_value"
+
+
+def test_rollback_schema_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Populate all string fields in the request which are not UUID4,
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP-4235.
+ request = schema.RollbackSchemaRequest(
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call:
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.rollback_schema(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == schema.RollbackSchemaRequest(
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+
+def test_rollback_schema_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.rollback_schema in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.rollback_schema] = mock_rpc
+ request = {}
+ client.rollback_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
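+ # (The first invocation goes through the cached wrapper; the repeat
+ # call below must reuse it rather than wrap the method again.)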
+ assert mock_rpc.call_count == 1 + + client.rollback_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_rollback_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.rollback_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.rollback_schema + ] = mock_rpc + + request = {} + await client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.rollback_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_rollback_schema_async( + transport: str = "grpc_asyncio", request_type=schema.RollbackSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = schema.RollbackSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_rollback_schema_async_from_dict(): + await test_rollback_schema_async(request_type=dict) + + +def test_rollback_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.RollbackSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
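+ # (Only the routing metadata is under test here, so a plain empty
+ # Schema is enough as the stubbed return value.)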
+ with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call:
+ call.return_value = schema.Schema()
+ client.rollback_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_rollback_schema_field_headers_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = schema.RollbackSchemaRequest()
+
+ request.name = "name_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema())
+ await client.rollback_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_rollback_schema_flattened():
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = schema.Schema()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.rollback_schema(
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+ arg = args[0].revision_id
+ mock_val = "revision_id_value"
+ assert arg == mock_val
+
+
+def test_rollback_schema_flattened_error():
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.rollback_schema(
+ schema.RollbackSchemaRequest(),
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_rollback_schema_flattened_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.rollback_schema(
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+ arg = args[0].revision_id
+ mock_val = "revision_id_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_rollback_schema_flattened_error_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.rollback_schema(
+ schema.RollbackSchemaRequest(),
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ schema.DeleteSchemaRevisionRequest,
+ dict,
+ ],
+)
+def test_delete_schema_revision(request_type, transport: str = "grpc"):
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_schema_revision), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = schema.Schema(
+ name="name_value",
+ type_=schema.Schema.Type.PROTOCOL_BUFFER,
+ definition="definition_value",
+ revision_id="revision_id_value",
+ )
+ response = client.delete_schema_revision(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = schema.DeleteSchemaRevisionRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, schema.Schema)
+ assert response.name == "name_value"
+ assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER
+ assert response.definition == "definition_value"
+ assert response.revision_id == "revision_id_value"
+
+
+def test_delete_schema_revision_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Populate all string fields in the request which are not UUID4,
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP-4235.
+ request = schema.DeleteSchemaRevisionRequest(
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_schema_revision), "__call__"
+ ) as call:
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ ) + client.delete_schema_revision(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRevisionRequest( + name="name_value", + revision_id="revision_id_value", + ) + + +def test_delete_schema_revision_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_schema_revision + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_schema_revision + ] = mock_rpc + request = {} + client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_schema_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_schema_revision_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_schema_revision + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_schema_revision + ] = mock_rpc + + request = {} + await client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_schema_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_schema_revision_async( + transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRevisionRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
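+ # (The value is wrapped in FakeUnaryUnaryCall so the async client can
+ # await it like a real gRPC call.)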
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = schema.DeleteSchemaRevisionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_delete_schema_revision_async_from_dict(): + await test_delete_schema_revision_async(request_type=dict) + + +def test_delete_schema_revision_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRevisionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + call.return_value = schema.Schema() + client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_schema_revision_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRevisionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + await client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_schema_revision_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_schema_revision( + name="name_value", + revision_id="revision_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+ arg = args[0].revision_id
+ mock_val = "revision_id_value"
+ assert arg == mock_val
+
+
+def test_delete_schema_revision_flattened_error():
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_schema_revision(
+ schema.DeleteSchemaRevisionRequest(),
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_revision_flattened_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_schema_revision), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_schema_revision(
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+ arg = args[0].revision_id
+ mock_val = "revision_id_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_revision_flattened_error_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_schema_revision(
+ schema.DeleteSchemaRevisionRequest(),
+ name="name_value",
+ revision_id="revision_id_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ schema.DeleteSchemaRequest,
+ dict,
+ ],
+)
+def test_delete_schema(request_type, transport: str = "grpc"):
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = schema.DeleteSchemaRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_schema_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Populate all string fields in the request which are not UUID4,
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP-4235.
+ request = schema.DeleteSchemaRequest(
+ name="name_value",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_schema), "__call__") as call:
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.delete_schema(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == schema.DeleteSchemaRequest(
+ name="name_value",
+ )
+
+
+def test_delete_schema_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.delete_schema in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.delete_schema] = mock_rpc
+ request = {}
+ client.delete_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.delete_schema(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.delete_schema
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.delete_schema
+ ] = mock_rpc
+
+ request = {}
+ await client.delete_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + await client.delete_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_schema_async( + transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = schema.DeleteSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_schema_async_from_dict(): + await test_delete_schema_async(request_type=dict) + + +def test_delete_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = None + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
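+ # (DeleteSchema returns google.protobuf.Empty, which the client
+ # surfaces as None.)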
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_schema(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_delete_schema_flattened_error():
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_schema(
+ schema.DeleteSchemaRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_flattened_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_schema(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_schema_flattened_error_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_schema(
+ schema.DeleteSchemaRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ gp_schema.ValidateSchemaRequest,
+ dict,
+ ],
+)
+def test_validate_schema(request_type, transport: str = "grpc"):
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.validate_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = gp_schema.ValidateSchemaResponse()
+ response = client.validate_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = gp_schema.ValidateSchemaRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, gp_schema.ValidateSchemaResponse)
+
+
+def test_validate_schema_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Populate all string fields in the request which are not UUID4,
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP-4235.
+ request = gp_schema.ValidateSchemaRequest(
+ parent="parent_value",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.validate_schema), "__call__") as call:
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.validate_schema(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == gp_schema.ValidateSchemaRequest(
+ parent="parent_value",
+ )
+
+
+def test_validate_schema_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.validate_schema in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[client._transport.validate_schema] = mock_rpc
+ request = {}
+ client.validate_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1
+
+ client.validate_schema(request)
+
+ # Establish that a new wrapper was not created for this call
+ assert wrapper_fn.call_count == 0
+ assert mock_rpc.call_count == 2
+
+
+@pytest.mark.asyncio
+async def test_validate_schema_async_use_cached_wrapped_rpc(
+ transport: str = "grpc_asyncio",
+):
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn:
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ transport=transport,
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert (
+ client._client._transport.validate_schema
+ in client._client._transport._wrapped_methods
+ )
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.AsyncMock()
+ mock_rpc.return_value = mock.Mock()
+ client._client._transport._wrapped_methods[
+ client._client._transport.validate_schema
+ ] = mock_rpc
+
+ request = {}
+ await client.validate_schema(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + await client.validate_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_validate_schema_async( + transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + response = await client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gp_schema.ValidateSchemaRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +@pytest.mark.asyncio +async def test_validate_schema_async_from_dict(): + await test_validate_schema_async(request_type=dict) + + +def test_validate_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.ValidateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value = gp_schema.ValidateSchemaResponse() + client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_validate_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.ValidateSchemaRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + await client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
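+ # (Routing information travels as the x-goog-request-params entry in
+ # the call metadata.)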
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "parent=parent_value",
+ ) in kw["metadata"]
+
+
+def test_validate_schema_flattened():
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.validate_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = gp_schema.ValidateSchemaResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.validate_schema(
+ parent="parent_value",
+ schema=gp_schema.Schema(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].schema
+ mock_val = gp_schema.Schema(name="name_value")
+ assert arg == mock_val
+
+
+def test_validate_schema_flattened_error():
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.validate_schema(
+ gp_schema.ValidateSchemaRequest(),
+ parent="parent_value",
+ schema=gp_schema.Schema(name="name_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_validate_schema_flattened_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.validate_schema), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ gp_schema.ValidateSchemaResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.validate_schema(
+ parent="parent_value",
+ schema=gp_schema.Schema(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].parent
+ mock_val = "parent_value"
+ assert arg == mock_val
+ arg = args[0].schema
+ mock_val = gp_schema.Schema(name="name_value")
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_validate_schema_flattened_error_async():
+ client = SchemaServiceAsyncClient(
+ credentials=async_anonymous_credentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.validate_schema(
+ gp_schema.ValidateSchemaRequest(),
+ parent="parent_value",
+ schema=gp_schema.Schema(name="name_value"),
+ )
+
+
+@pytest.mark.parametrize(
+ "request_type",
+ [
+ schema.ValidateMessageRequest,
+ dict,
+ ],
+)
+def test_validate_message(request_type, transport: str = "grpc"):
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.validate_message), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = schema.ValidateMessageResponse()
+ response = client.validate_message(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ request = schema.ValidateMessageRequest()
+ assert args[0] == request
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, schema.ValidateMessageResponse)
+
+
+def test_validate_message_non_empty_request_with_auto_populated_field():
+ # This test is a coverage failsafe to make sure that UUID4 fields are
+ # automatically populated, according to AIP-4235, with non-empty requests.
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Populate all string fields in the request which are not UUID4,
+ # since we want to check that UUID4 fields are populated automatically
+ # if they meet the requirements of AIP-4235.
+ request = schema.ValidateMessageRequest(
+ parent="parent_value",
+ name="name_value",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.validate_message), "__call__") as call:
+ call.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client.validate_message(request=request)
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == schema.ValidateMessageRequest(
+ parent="parent_value",
+ name="name_value",
+ )
+
+
+def test_validate_message_use_cached_wrapped_rpc():
+ # Clients should use _prep_wrapped_messages to create cached wrapped rpcs,
+ # instead of constructing them on each call
+ with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn:
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="grpc",
+ )
+
+ # Should wrap all calls on client creation
+ assert wrapper_fn.call_count > 0
+ wrapper_fn.reset_mock()
+
+ # Ensure method has been cached
+ assert client._transport.validate_message in client._transport._wrapped_methods
+
+ # Replace cached wrapped function with mock
+ mock_rpc = mock.Mock()
+ mock_rpc.return_value.name = (
+ "foo" # operation_request.operation in compute client(s) expect a string.
+ )
+ client._transport._wrapped_methods[
+ client._transport.validate_message
+ ] = mock_rpc
+ request = {}
+ client.validate_message(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert mock_rpc.call_count == 1 + + client.validate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_validate_message_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.validate_message + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.validate_message + ] = mock_rpc + + request = {} + await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.validate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_validate_message_async( + transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest +): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ValidateMessageResponse() + ) + response = await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = schema.ValidateMessageRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.ValidateMessageResponse) + + +@pytest.mark.asyncio +async def test_validate_message_async_from_dict(): + await test_validate_message_async(request_type=dict) + + +def test_validate_message_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ValidateMessageRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value = schema.ValidateMessageResponse() + client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_validate_message_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ValidateMessageRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ValidateMessageResponse() + ) + await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_schema] = mock_rpc + + request = {} + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_schema_rest_required_fields(request_type=gp_schema.CreateSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
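+ # (Any required fields still unset at this point must be query-string
+ # parameters such as schema_id, never path or body fields.)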
+ assert not set(unset_fields) - set(("schema_id",))
+ jsonified_request.update(unset_fields)
+
+ # verify required fields with non-default values are left alone
+ assert "parent" in jsonified_request
+ assert jsonified_request["parent"] == "parent_value"
+
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+ request = request_type(**request_init)
+
+ # Designate an appropriate value for the returned response.
+ return_value = gp_schema.Schema()
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(Session, "request") as req:
+ # We need to mock transcode() because providing default values
+ # for required fields will fail the real version if the http_options
+ # expect actual values for those fields.
+ with mock.patch.object(path_template, "transcode") as transcode:
+ # A uri without fields and an empty body will force all the
+ # request fields to show up in the query_params.
+ pb_request = request_type.pb(request)
+ transcode_result = {
+ "uri": "v1/sample_method",
+ "method": "post",
+ "query_params": pb_request,
+ }
+ transcode_result["body"] = pb_request
+ transcode.return_value = transcode_result
+
+ response_value = Response()
+ response_value.status_code = 200
+
+ # Convert return value to protobuf type
+ return_value = gp_schema.Schema.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ response = client.create_schema(request)
+
+ expected_params = [("$alt", "json;enum-encoding=int")]
+ actual_params = req.call_args.kwargs["params"]
+ assert expected_params == actual_params
+
+
+def test_create_schema_rest_unset_required_fields():
+ transport = transports.SchemaServiceRestTransport(
+ credentials=ga_credentials.AnonymousCredentials()
+ )
+
+ unset_fields = transport.create_schema._get_unset_required_fields({})
+ assert set(unset_fields) == (
+ set(("schemaId",))
+ & set(
+ (
+ "parent",
+ "schema",
+ )
+ )
+ )
+
+
+def test_create_schema_rest_flattened():
+ client = SchemaServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport="rest",
+ )
+
+ # Mock the http request call within the method and fake a response.
+ with mock.patch.object(type(client.transport._session), "request") as req:
+ # Designate an appropriate value for the returned response.
+ return_value = gp_schema.Schema()
+
+ # get arguments that satisfy an http rule for this method
+ sample_request = {"parent": "projects/sample1"}
+
+ # get truthy value for each flattened field
+ mock_args = dict(
+ parent="parent_value",
+ schema=gp_schema.Schema(name="name_value"),
+ schema_id="schema_id_value",
+ )
+ mock_args.update(sample_request)
+
+ # Wrap the value into a proper Response obj
+ response_value = Response()
+ response_value.status_code = 200
+ # Convert return value to protobuf type
+ return_value = gp_schema.Schema.pb(return_value)
+ json_return_value = json_format.MessageToJson(return_value)
+ response_value._content = json_return_value.encode("UTF-8")
+ req.return_value = response_value
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
+
+ client.create_schema(**mock_args)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/schemas" % client.transport._host, args[1] + ) + + +def test_create_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_schema( + gp_schema.CreateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) + + +def test_get_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_schema] = mock_rpc + + request = {} + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_schema_rest_required_fields(request_type=schema.GetSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. 
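+ # (Patching Session.request keeps the test offline; no real HTTP
+ # request is ever issued.)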
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +def test_get_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}" % client.transport._host, args[1] + ) + + +def test_get_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
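+    # A sketch of the rule being enforced -- callers must pick one style:
+    #     client.get_schema(request=schema.GetSchemaRequest(name=...))  # ok
+    #     client.get_schema(name="name_value")                          # ok
+    #     client.get_schema(schema.GetSchemaRequest(), name=...)        # ValueError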
+ with pytest.raises(ValueError): + client.get_schema( + schema.GetSchemaRequest(), + name="name_value", + ) + + +def test_list_schemas_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_schemas in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_schemas] = mock_rpc + + request = {} + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_schemas(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_schemas_rest_required_fields(request_type=schema.ListSchemasRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemasResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
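+            # For reference, the real transcode() consults the method's
+            # http_options and splits the message into uri/query_params/body;
+            # it is stubbed here because the placeholder "parent_value" would
+            # not match the {parent=projects/*} path pattern. For a
+            # well-formed request, a real result might look roughly like
+            # (hypothetical):
+            #     {"uri": "v1/projects/sample1/schemas", "method": "get",
+            #      "query_params": {"pageSize": 10}}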
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_schemas(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_schemas_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_schemas._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +def test_list_schemas_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemasResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_schemas(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/schemas" % client.transport._host, args[1] + ) + + +def test_list_schemas_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schemas( + schema.ListSchemasRequest(), + parent="parent_value", + ) + + +def test_list_schemas_rest_pager(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(schema.ListSchemasResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_schemas(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + pages = list(client.list_schemas(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_schema_revisions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_schema_revisions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_schema_revisions + ] = mock_rpc + + request = {} + client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
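+        # _prep_wrapped_messages runs wrap_method once per RPC at client
+        # construction, layering retry/timeout/metadata around the raw stub
+        # and caching the result, roughly:
+        #     wrapped = client._transport._wrapped_methods[client._transport.list_schema_revisions]
+        #     wrapped(request)  # reused; no further wrap_method calls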
+ assert mock_rpc.call_count == 1 + + client.list_schema_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_schema_revisions_rest_required_fields( + request_type=schema.ListSchemaRevisionsRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schema_revisions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schema_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemaRevisionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_schema_revisions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_schema_revisions_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_schema_revisions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "view", + ) + ) + & set(("name",)) + ) + + +def test_list_schema_revisions_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemaRevisionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_schema_revisions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:listRevisions" % client.transport._host, + args[1], + ) + + +def test_list_schema_revisions_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schema_revisions( + schema.ListSchemaRevisionsRequest(), + name="name_value", + ) + + +def test_list_schema_revisions_rest_pager(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + schema.ListSchemaRevisionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"name": "projects/sample1/schemas/sample2"} + + pager = client.list_schema_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + pages = list(client.list_schema_revisions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_commit_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.commit_schema] = mock_rpc + + request = {} + client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.commit_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_commit_schema_rest_required_fields(request_type=gp_schema.CommitSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).commit_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).commit_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.commit_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_commit_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.commit_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "schema", + ) + ) + ) + + +def test_commit_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
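+        # The fake wire response below mirrors what a server would emit:
+        # proto message -> protobuf wrapper via .pb() -> MessageToJson ->
+        # UTF-8 bytes in Response._content, which the transport then parses
+        # back into a gp_schema.Schema.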
+ return_value = gp_schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.commit_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:commit" % client.transport._host, args[1] + ) + + +def test_commit_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.commit_schema( + gp_schema.CommitSchemaRequest(), + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +def test_rollback_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback_schema] = mock_rpc + + request = {} + client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.rollback_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_rollback_schema_rest_required_fields( + request_type=schema.RollbackSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["revision_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["revisionId"] = "revision_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "revisionId" in jsonified_request + assert jsonified_request["revisionId"] == "revision_id_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.rollback_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_rollback_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.rollback_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "revisionId", + ) + ) + ) + + +def test_rollback_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + revision_id="revision_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.rollback_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:rollback" % client.transport._host, + args[1], + ) + + +def test_rollback_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rollback_schema( + schema.RollbackSchemaRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + +def test_delete_schema_revision_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_schema_revision + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_schema_revision + ] = mock_rpc + + request = {} + client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_schema_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_schema_revision_rest_required_fields( + request_type=schema.DeleteSchemaRevisionRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema_revision._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("revision_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
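+            # Two spellings are in play here: the assertion above uses the
+            # snake_case proto field name ("revision_id"), while the
+            # *_unset_required_fields test further below checks the camelCase
+            # JSON name ("revisionId").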
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_schema_revision(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_schema_revision_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_schema_revision._get_unset_required_fields({}) + assert set(unset_fields) == (set(("revisionId",)) & set(("name",))) + + +def test_delete_schema_revision_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + revision_id="revision_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_schema_revision(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:deleteRevision" % client.transport._host, + args[1], + ) + + +def test_delete_schema_revision_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_schema_revision( + schema.DeleteSchemaRevisionRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + +def test_delete_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_schema] = mock_rpc + + request = {} + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_schema_rest_required_fields(request_type=schema.DeleteSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
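+            # delete_schema returns google.protobuf.Empty, so unlike the
+            # schema-returning methods above there is no payload to
+            # serialize; the faked response body below is an empty string.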
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}" % client.transport._host, args[1] + ) + + +def test_delete_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_schema( + schema.DeleteSchemaRequest(), + name="name_value", + ) + + +def test_validate_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.validate_schema] = mock_rpc + + request = {} + client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.validate_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_validate_schema_rest_required_fields( + request_type=gp_schema.ValidateSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gp_schema.ValidateSchemaResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.validate_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_validate_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.validate_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "schema", + ) + ) + ) + + +def test_validate_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.ValidateSchemaResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.validate_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/schemas:validate" % client.transport._host, + args[1], + ) + + +def test_validate_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.validate_schema( + gp_schema.ValidateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +def test_validate_message_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate_message in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.validate_message + ] = mock_rpc + + request = {} + client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.validate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_validate_message_rest_required_fields( + request_type=schema.ValidateMessageRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.ValidateMessageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
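+            # validate_message maps to an http POST, so the stubbed transcode
+            # result below also carries a "body" entry alongside query_params,
+            # mirroring what the real transcode() would produce for a
+            # body-bearing rule (presumably body: "*" in the http_options).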
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ValidateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.validate_message(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_validate_message_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.validate_message._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SchemaServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
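+    # Both gRPC transport flavors expose the underlying channel via
+    # .grpc_channel; the checks below assert only that a channel exists for
+    # anonymous credentials, not anything about its configuration.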
+ transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SchemaServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = SchemaServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + client.create_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CreateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = schema.Schema() + client.get_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.GetSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_schemas_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = schema.ListSchemasResponse() + client.list_schemas(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemasRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_schema_revisions_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + call.return_value = schema.ListSchemaRevisionsResponse() + client.list_schema_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemaRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + client.commit_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CommitSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + call.return_value = schema.Schema() + client.rollback_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.RollbackSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_schema_revision_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + call.return_value = schema.Schema() + client.delete_schema_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = None + client.delete_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_validate_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value = gp_schema.ValidateSchemaResponse() + client.validate_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.ValidateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_validate_message_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value = schema.ValidateMessageResponse() + client.validate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ValidateMessageRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SchemaServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.create_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CreateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
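+        # (Aside, assuming google.api_core's async test helper:
+        # FakeUnaryUnaryCall stores the supplied message and returns it when
+        # awaited, letting the async client `await` this mocked stub exactly
+        # like a live unary-unary RPC, e.g.
+        #   fake = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema())
+        #   response = await fake)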
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.get_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.GetSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_schemas_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_schemas(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemasRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_schema_revisions_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_schema_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemaRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_commit_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.commit_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CommitSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_rollback_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.rollback_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.RollbackSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_schema_revision_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.delete_schema_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_validate_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + await client.validate_schema(request=None) + + # Establish that the underlying stub method was called. 
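+        # (Illustrative note: entries of call.mock_calls unpack as
+        # (name, args, kwargs), so `_, args, _ = call.mock_calls[0]` extracts
+        # the positional arguments and args[0] is the request proto the
+        # client actually handed to the stub.)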
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.ValidateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_validate_message_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ValidateMessageResponse() + ) + await client.validate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ValidateMessageRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SchemaServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_schema_rest_bad_request(request_type=gp_schema.CreateSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CreateSchemaRequest, + dict, + ], +) +def test_create_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["schema"] = { + "name": "name_value", + "type_": 1, + "definition": "definition_value", + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gp_schema.CreateSchemaRequest.meta.fields["schema"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
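+        # (Sketch of the runtime check used here: proto-plus message classes
+        # expose `.meta.fields`, while raw protobuf classes expose
+        # `.DESCRIPTOR.fields`; probing field.message for DESCRIPTOR is how
+        # the two runtimes are told apart.)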
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["schema"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["schema"][field])): + del request_init["schema"][field][i][subfield] + else: + del request_init["schema"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_schema(request) + + # Establish that the response is the type that we expect. 
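+    # (Aside: these assertions compare proto-plus attributes that were
+    # round-tripped through JSON above, schematically
+    #   json_format.MessageToJson(gp_schema.Schema.pb(return_value)),
+    # so they also exercise the REST deserialization path.)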
+ assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_create_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gp_schema.CreateSchemaRequest.pb(gp_schema.CreateSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gp_schema.Schema.to_json(gp_schema.Schema()) + req.return_value.content = return_value + + request = gp_schema.CreateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.Schema() + post_with_metadata.return_value = gp_schema.Schema(), metadata + + client.create_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_schema_rest_bad_request(request_type=schema.GetSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.GetSchemaRequest, + dict, + ], +) +def test_get_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
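+    # (Hedged note: client.transport._session is a requests-style
+    # AuthorizedSession, so patching its `request` method fakes the entire
+    # HTTP exchange without any network I/O.)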
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_get_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = schema.GetSchemaRequest.pb(schema.GetSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schema.Schema.to_json(schema.Schema()) + req.return_value.content = return_value + + request = schema.GetSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata + + client.get_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_schemas_rest_bad_request(request_type=schema.ListSchemasRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
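+    # (Sketch: google.api_core maps HTTP status codes onto exception
+    # classes, e.g. 400 -> core_exceptions.BadRequest and 404 ->
+    # core_exceptions.NotFound, which is what pytest.raises checks below.)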
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_schemas(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemasRequest, + dict, + ], +) +def test_list_schemas_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_schemas(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_schemas_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_list_schemas" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = schema.ListSchemasRequest.pb(schema.ListSchemasRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schema.ListSchemasResponse.to_json(schema.ListSchemasResponse()) + req.return_value.content = return_value + + request = schema.ListSchemasRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ListSchemasResponse() + post_with_metadata.return_value = schema.ListSchemasResponse(), metadata + + client.list_schemas( + 
request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_schema_revisions_rest_bad_request( + request_type=schema.ListSchemaRevisionsRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_schema_revisions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemaRevisionsRequest, + dict, + ], +) +def test_list_schema_revisions_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_schema_revisions(request) + + # Establish that the response is the type that we expect. 
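+    # (Illustrative aside: list RPCs return a pager rather than the raw
+    # response; iterating it, e.g.
+    #   for revision in client.list_schema_revisions(request): ...
+    # transparently re-issues the request while next_page_token is set.)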
+ assert isinstance(response, pagers.ListSchemaRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_schema_revisions_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schema_revisions" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, + "post_list_schema_revisions_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_list_schema_revisions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = schema.ListSchemaRevisionsRequest.pb( + schema.ListSchemaRevisionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schema.ListSchemaRevisionsResponse.to_json( + schema.ListSchemaRevisionsResponse() + ) + req.return_value.content = return_value + + request = schema.ListSchemaRevisionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ListSchemaRevisionsResponse() + post_with_metadata.return_value = schema.ListSchemaRevisionsResponse(), metadata + + client.list_schema_revisions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_commit_schema_rest_bad_request(request_type=gp_schema.CommitSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.commit_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CommitSchemaRequest, + dict, + ], +) +def test_commit_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.commit_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_commit_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_commit_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_commit_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_commit_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gp_schema.CommitSchemaRequest.pb(gp_schema.CommitSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gp_schema.Schema.to_json(gp_schema.Schema()) + req.return_value.content = return_value + + request = gp_schema.CommitSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.Schema() + post_with_metadata.return_value = gp_schema.Schema(), metadata + + client.commit_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_rollback_schema_rest_bad_request(request_type=schema.RollbackSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.rollback_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.RollbackSchemaRequest, + dict, + ], +) +def test_rollback_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.rollback_schema(request) + + # Establish that the response is the type that we expect. 
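+    # (Aside: given the `$alt=json;enum-encoding=int` query param asserted
+    # earlier in this file, enum fields such as Schema.Type.PROTOCOL_BUFFER
+    # travel as integers in the JSON payload and come back as enum members
+    # by the time they are compared here.)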
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_rollback_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_rollback_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_rollback_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = schema.RollbackSchemaRequest.pb(schema.RollbackSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schema.Schema.to_json(schema.Schema()) + req.return_value.content = return_value + + request = schema.RollbackSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata + + client.rollback_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_schema_revision_rest_bad_request( + request_type=schema.DeleteSchemaRevisionRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_schema_revision(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRevisionRequest, + dict, + ], +) +def test_delete_schema_revision_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
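+    # (Sketch of the interceptor contract exercised by the *_interceptors
+    # tests in this file: pre_<rpc> may rewrite (request, metadata) before
+    # transcoding, post_<rpc> receives the decoded response, and
+    # post_<rpc>_with_metadata receives and returns (response, metadata).)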
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_schema_revision(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_schema_revision_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_delete_schema_revision" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, + "post_delete_schema_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_delete_schema_revision" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = schema.DeleteSchemaRevisionRequest.pb( + schema.DeleteSchemaRevisionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schema.Schema.to_json(schema.Schema()) + req.return_value.content = return_value + + request = schema.DeleteSchemaRevisionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata + + client.delete_schema_revision( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_schema_rest_bad_request(request_type=schema.DeleteSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRequest, + dict, + ], +) +def test_delete_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_schema(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_delete_schema" + ) as pre: + pre.assert_not_called() + pb_message = schema.DeleteSchemaRequest.pb(schema.DeleteSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = schema.DeleteSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_validate_schema_rest_bad_request(request_type=gp_schema.ValidateSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
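+    # (Aside on the delete tests above: RPCs declared to return
+    # google.protobuf.Empty surface as `None` in Python, which is why the
+    # mocked body there is an empty string and the success test asserts
+    # `response is None`.)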
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.validate_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.ValidateSchemaRequest, + dict, + ], +) +def test_validate_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.ValidateSchemaResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.validate_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_validate_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gp_schema.ValidateSchemaRequest.pb( + gp_schema.ValidateSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gp_schema.ValidateSchemaResponse.to_json( + gp_schema.ValidateSchemaResponse() + ) + req.return_value.content = return_value + + request = gp_schema.ValidateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.ValidateSchemaResponse() + post_with_metadata.return_value = gp_schema.ValidateSchemaResponse(), metadata + + client.validate_schema( + 
request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_validate_message_rest_bad_request(request_type=schema.ValidateMessageRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.validate_message(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ValidateMessageRequest, + dict, + ], +) +def test_validate_message_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ValidateMessageResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ValidateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.validate_message(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.ValidateMessageResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_message_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_message" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_message_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_validate_message" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = schema.ValidateMessageRequest.pb(schema.ValidateMessageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = schema.ValidateMessageResponse.to_json( + schema.ValidateMessageResponse() + ) + req.return_value.content = return_value + + request = schema.ValidateMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ValidateMessageResponse() + post_with_metadata.return_value = schema.ValidateMessageResponse(), metadata + + client.validate_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
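+        # policy_pb2.Policy is a plain protobuf message (not proto-plus), so it
+        # is serialized below with json_format.MessageToJson directly.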
+ return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
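+    # A 400 status from the mocked session should surface to the caller as
+    # core_exceptions.BadRequest.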
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_initialize_client_w_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + client.create_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CreateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + client.get_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.GetSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_schemas_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
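+    # Patching the transport method's __call__ exposes the request message the
+    # client synthesizes when the caller passes request=None.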
+ with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + client.list_schemas(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemasRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_schema_revisions_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + client.list_schema_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemaRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + client.commit_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CommitSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + client.rollback_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.RollbackSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_schema_revision_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + client.delete_schema_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + client.delete_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_validate_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + client.validate_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.ValidateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_validate_message_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + client.validate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ValidateMessageRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SchemaServiceGrpcTransport, + ) + + +def test_schema_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_schema_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
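+    # The base transport only defines the interface; concrete transports
+    # (gRPC, gRPC-asyncio, REST) are expected to override each of these.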
+ methods = ( + "create_schema", + "get_schema", + "list_schemas", + "list_schema_revisions", + "commit_schema", + "rollback_schema", + "delete_schema_revision", + "delete_schema", + "validate_schema", + "validate_message", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_schema_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_schema_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport() + adc.assert_called_once() + + +def test_schema_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SchemaServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
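+    # Patch google.auth.default so the test can assert the scopes and quota
+    # project that are forwarded to Application Default Credentials.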
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_schema_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SchemaServiceGrpcTransport, grpc_helpers), + (transports.SchemaServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_schema_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
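+    # grpc.ssl_channel_credentials should receive the cert/key pair produced
+    # by the client_cert_source_for_mtls callback.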
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_schema_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SchemaServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_schema_service_host_no_port(transport_name): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_schema_service_host_with_port(transport_name): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "pubsub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_schema_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SchemaServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SchemaServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_schema._session + session2 = client2.transport.create_schema._session + assert session1 != session2 + session1 = client1.transport.get_schema._session + session2 = client2.transport.get_schema._session + assert session1 != session2 + session1 = client1.transport.list_schemas._session + session2 = client2.transport.list_schemas._session + assert session1 != session2 + session1 = client1.transport.list_schema_revisions._session + session2 = client2.transport.list_schema_revisions._session + assert session1 != session2 + session1 = client1.transport.commit_schema._session + session2 = client2.transport.commit_schema._session + assert session1 != session2 + session1 = client1.transport.rollback_schema._session + session2 = client2.transport.rollback_schema._session + assert session1 != session2 + session1 = client1.transport.delete_schema_revision._session + session2 = client2.transport.delete_schema_revision._session + assert session1 != session2 + session1 = client1.transport.delete_schema._session + session2 = client2.transport.delete_schema._session + assert session1 != session2 + session1 = client1.transport.validate_schema._session + 
+    session2 = client2.transport.validate_schema._session
+    assert session1 != session2
+    session1 = client1.transport.validate_message._session
+    session2 = client2.transport.validate_message._session
+    assert session1 != session2
+
+
+def test_schema_service_grpc_transport_channel():
+    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SchemaServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_schema_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.SchemaServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.filterwarnings("ignore::FutureWarning")
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.SchemaServiceGrpcTransport,
+        transports.SchemaServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_schema_service_transport_channel_mtls_with_client_cert_source(transport_class):
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                    ("grpc.max_metadata_size", 4 * 1024 * 1024),
+                    ("grpc.keepalive_time_ms", 30000),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_schema_path(): + project = "squid" + schema = "clam" + expected = "projects/{project}/schemas/{schema}".format( + project=project, + schema=schema, + ) + actual = SchemaServiceClient.schema_path(project, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "whelk", + "schema": "octopus", + } + path = SchemaServiceClient.schema_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_schema_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SchemaServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = SchemaServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SchemaServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = SchemaServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SchemaServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = SchemaServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
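+    # Each parse_* helper must exactly invert its corresponding *_path builder,
+    # e.g. parsing "organizations/nautilus" recovers {"organization": "nautilus"}.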
+ actual = SchemaServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = SchemaServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = SchemaServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SchemaServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = SchemaServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SchemaServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_set_iam_policy(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
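+        # FakeUnaryUnaryCall wraps the response in an awaitable, mimicking a
+        # real async gRPC call.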
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_field_headers_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy())
+
+        await client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]
+
+
+def test_set_iam_policy_from_dict():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy()
+
+        response = client.set_iam_policy(
+            request={
+                "resource": "resource_value",
+                "policy": policy_pb2.Policy(version=774),
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_from_dict_async():
+    client = SchemaServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
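+    # The resource name must be propagated via the x-goog-request-params
+    # routing header.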
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
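+    # The mocked response passes through unchanged, so both its type and its
+    # permissions list can be asserted.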
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
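+    # Passing the request as a plain dict exercises the client's dict-to-proto
+    # coercion path.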
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
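+        # Exiting the client context manager should close the underlying
+        # transport.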
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/tests/unit/gapic/pubsub_v1/test_subscriber.py b/tests/unit/gapic/pubsub_v1/test_subscriber.py new file mode 100644 index 000000000..816b04500 --- /dev/null +++ b/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -0,0 +1,14224 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import warnings + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +from collections.abc import Iterable, AsyncIterable +from google.protobuf import json_format +import json +import math +import pytest +from google.api_core import api_core_version +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.subscriber import SubscriberAsyncClient +from google.pubsub_v1.services.subscriber import SubscriberClient +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.services.subscriber import transports +from google.pubsub_v1.types import pubsub +import google.auth + + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SubscriberClient._get_default_mtls_endpoint(None) is None + assert ( + SubscriberClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + SubscriberClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SubscriberClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SubscriberClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +def test__read_environment_variables(): + assert SubscriberClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SubscriberClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SubscriberClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + SubscriberClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert SubscriberClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SubscriberClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SubscriberClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SubscriberClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SubscriberClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SubscriberClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. 
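+    # (Note on the cases below, inferred from their assertions: when
+    # `should_use_client_cert` is available, google-auth owns the whole
+    # client-certificate decision and invalid environment values simply yield
+    # False; when it is absent, the client parses
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` itself, case-insensitively, and
+    # raises ValueError on anything other than "true"/"false". A rough sketch
+    # of the assumed decision logic, illustrative only:
+    #
+    #   if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+    #       return google.auth.transport.mtls.should_use_client_cert()
+    #   value = os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false").lower()
+    #   if value not in ("true", "false"):
+    #       raise ValueError(...)
+    #   return value == "true"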
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, clear=True):
+            assert SubscriberClient._use_client_cert_effective() is False
+
+    # Test case 10: Test when `should_use_client_cert` is unavailable and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should raise a ValueError as the environment variable must be either
+    # "true" or "false".
+    if not hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(
+            os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}
+        ):
+            with pytest.raises(ValueError):
+                SubscriberClient._use_client_cert_effective()
+
+    # Test case 11: Test when `should_use_client_cert` is available and the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value.
+    # The method should return False as the environment variable is set to an invalid value.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(
+            os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"}
+        ):
+            assert SubscriberClient._use_client_cert_effective() is False
+
+    # Test case 12: Test when `should_use_client_cert` is available and both the
+    # `GOOGLE_API_USE_CLIENT_CERTIFICATE` and `GOOGLE_API_CERTIFICATE_CONFIG`
+    # environment variables are set to empty strings.
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}):
+            with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}):
+                assert SubscriberClient._use_client_cert_effective() is False
+
+
+def test__get_client_cert_source():
+    mock_provided_cert_source = mock.Mock()
+    mock_default_cert_source = mock.Mock()
+
+    assert SubscriberClient._get_client_cert_source(None, False) is None
+    assert (
+        SubscriberClient._get_client_cert_source(mock_provided_cert_source, False)
+        is None
+    )
+    assert (
+        SubscriberClient._get_client_cert_source(mock_provided_cert_source, True)
+        == mock_provided_cert_source
+    )
+
+    with mock.patch(
+        "google.auth.transport.mtls.has_default_client_cert_source", return_value=True
+    ):
+        with mock.patch(
+            "google.auth.transport.mtls.default_client_cert_source",
+            return_value=mock_default_cert_source,
+        ):
+            assert (
+                SubscriberClient._get_client_cert_source(None, True)
+                is mock_default_cert_source
+            )
+            assert (
+                SubscriberClient._get_client_cert_source(
+                    mock_provided_cert_source, "true"
+                )
+                is mock_provided_cert_source
+            )
+
+
+@mock.patch.object(
+    SubscriberClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(SubscriberClient),
+)
+@mock.patch.object(
+    SubscriberAsyncClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(SubscriberAsyncClient),
+)
+def test__get_api_endpoint():
+    api_override = "foo.com"
+    mock_client_cert_source = mock.Mock()
+    default_universe = SubscriberClient._DEFAULT_UNIVERSE
+    default_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=default_universe
+    )
+    mock_universe = "bar.com"
+    mock_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format(
+        UNIVERSE_DOMAIN=mock_universe
+    )
+
+    assert (
+        SubscriberClient._get_api_endpoint(
+            api_override, mock_client_cert_source, default_universe, "always"
+        )
+        == api_override
+    )
+    assert (
+        SubscriberClient._get_api_endpoint(
+            None, mock_client_cert_source, default_universe, "auto"
+        )
+        == SubscriberClient.DEFAULT_MTLS_ENDPOINT
+    )
+
assert ( + SubscriberClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + SubscriberClient._get_api_endpoint(None, None, default_universe, "always") + == SubscriberClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SubscriberClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SubscriberClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SubscriberClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + SubscriberClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SubscriberClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SubscriberClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SubscriberClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SubscriberClient._get_universe_domain(None, None) + == SubscriberClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SubscriberClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SubscriberClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SubscriberClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SubscriberClient, "grpc"), + (SubscriberAsyncClient, "grpc_asyncio"), + (SubscriberClient, "rest"), + ], +) +def test_subscriber_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name 
in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SubscriberGrpcTransport, "grpc"), + (transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SubscriberRestTransport, "rest"), + ], +) +def test_subscriber_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (SubscriberClient, "grpc"), + (SubscriberAsyncClient, "grpc_asyncio"), + (SubscriberClient, "rest"), + ], +) +def test_subscriber_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +def test_subscriber_client_get_transport_class(): + transport = SubscriberClient.get_transport_class() + available_transports = [ + transports.SubscriberGrpcTransport, + transports.SubscriberRestTransport, + ] + assert transport in available_transports + + transport = SubscriberClient.get_transport_class("grpc") + assert transport == transports.SubscriberGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SubscriberClient, transports.SubscriberRestTransport, "rest"), + ], +) +@mock.patch.object( + SubscriberClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberClient), +) +@mock.patch.object( + SubscriberAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberAsyncClient), +) +def test_subscriber_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SubscriberClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object(SubscriberClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError) as excinfo:
+            client = client_class(transport=transport_name)
+        assert (
+            str(excinfo.value)
+            == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+        )
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience=None,
+        )
+    # Check the case api_audience is provided
+    options = client_options.ClientOptions(
+        api_audience="https://language.googleapis.com"
+    )
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options, transport=transport_name)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client._DEFAULT_ENDPOINT_TEMPLATE.format(
+                UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE
+            ),
+            scopes=None,
+            client_cert_source_for_mtls=None,
+            quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+            always_use_jwt_access=True,
+            api_audience="https://language.googleapis.com",
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "true"),
+        (
+            SubscriberAsyncClient,
+            transports.SubscriberGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "false"),
+        (
+            SubscriberAsyncClient,
+            transports.SubscriberGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+        (SubscriberClient, transports.SubscriberRestTransport, "rest", "true"),
+        (SubscriberClient, transports.SubscriberRestTransport, "rest", "false"),
+    ],
+)
+@mock.patch.object(
+    SubscriberClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(SubscriberClient),
+)
+@mock.patch.object(
+    SubscriberAsyncClient,
+    "_DEFAULT_ENDPOINT_TEMPLATE",
+    modify_default_endpoint_template(SubscriberAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_subscriber_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
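+    # Three scenarios are exercised in turn: an explicit client_cert_source in
+    # ClientOptions, a certificate discovered through ADC
+    # (google.auth.transport.mtls.default_client_cert_source), and no
+    # certificate at all. Only the first two can switch the endpoint to
+    # DEFAULT_MTLS_ENDPOINT, and only when the env var is "true".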
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient]) +@mock.patch.object( + SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient) +) +@mock.patch.object( + SubscriberAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SubscriberAsyncClient), +) +def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
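+    # With an explicit api_endpoint in the options, the override is returned
+    # verbatim; the cert source is surfaced only when
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE is "true".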
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}):
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(
+            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
+        )
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
+            options
+        )
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source == mock_client_cert_source
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false".
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}):
+        mock_client_cert_source = mock.Mock()
+        mock_api_endpoint = "foo"
+        options = client_options.ClientOptions(
+            client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint
+        )
+        api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
+            options
+        )
+        assert api_endpoint == mock_api_endpoint
+        assert cert_source is None
+
+    # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported".
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+            mock_client_cert_source = mock.Mock()
+            mock_api_endpoint = "foo"
+            options = client_options.ClientOptions(
+                client_cert_source=mock_client_cert_source,
+                api_endpoint=mock_api_endpoint,
+            )
+            api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(
+                options
+            )
+            assert api_endpoint == mock_api_endpoint
+            assert cert_source is None
+
+    # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset.
+    test_cases = [
+        (
+            # With workloads present in config, mTLS is enabled.
+            {
+                "version": 1,
+                "cert_configs": {
+                    "workload": {
+                        "cert_path": "path/to/cert/file",
+                        "key_path": "path/to/key/file",
+                    }
+                },
+            },
+            mock_client_cert_source,
+        ),
+        (
+            # With workloads not present in config, mTLS is disabled.
+            {
+                "version": 1,
+                "cert_configs": {},
+            },
+            None,
+        ),
+    ]
+    if hasattr(google.auth.transport.mtls, "should_use_client_cert"):
+        for config_data, expected_cert_source in test_cases:
+            env = os.environ.copy()
+            env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None)
+            with mock.patch.dict(os.environ, env, clear=True):
+                config_filename = "mock_certificate_config.json"
+                config_file_content = json.dumps(config_data)
+                m = mock.mock_open(read_data=config_file_content)
+                with mock.patch("builtins.open", m):
+                    with mock.patch.dict(
+                        os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename}
+                    ):
+                        mock_api_endpoint = "foo"
+                        options = client_options.ClientOptions(
+                            client_cert_source=mock_client_cert_source,
+                            api_endpoint=mock_api_endpoint,
+                        )
+                        (
+                            api_endpoint,
+                            cert_source,
+                        ) = client_class.get_mtls_endpoint_and_cert_source(options)
+                        assert api_endpoint == mock_api_endpoint
+                        assert cert_source is expected_cert_source
+
+    # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset (empty string).
+    test_cases = [
+        (
+            # With workloads present in config, mTLS is enabled.
+            {
+                "version": 1,
+                "cert_configs": {
+                    "workload": {
+                        "cert_path": "path/to/cert/file",
+                        "key_path": "path/to/key/file",
+                    }
+                },
+            },
+            mock_client_cert_source,
+        ),
+        (
+            # With workloads not present in config, mTLS is disabled.
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient]) +@mock.patch.object( + SubscriberClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberClient), +) +@mock.patch.object( + SubscriberAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberAsyncClient), +) +def test_subscriber_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SubscriberClient._DEFAULT_UNIVERSE + default_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
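+    # (Feature detection below: older google-api-core releases may not expose
+    # ClientOptions.universe_domain, so the custom universe is only exercised
+    # when the attribute exists.)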
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (SubscriberClient, transports.SubscriberRestTransport, "rest"), + ], +) +def test_subscriber_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (SubscriberClient, transports.SubscriberRestTransport, "rest", None), + ], +) +def test_subscriber_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
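+    # credentials_file is forwarded to the transport untouched (note
+    # credentials=None in the expected call below); the transport is the one
+    # that actually loads the file.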
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_subscriber_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SubscriberClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_subscriber_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
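+    # The expected create_channel call below also pins Pub/Sub's custom gRPC
+    # channel options: -1 lifts gRPC's default send/receive message size caps
+    # (gRPC treats -1 as unlimited), metadata is capped at 4 MiB, and
+    # keepalive pings go out every 30 seconds.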
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Subscription, + dict, + ], +) +def test_create_subscription(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + response = client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.Subscription() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_create_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.Subscription( + name="name_value", + topic="topic_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
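+    # pubsub.Subscription carries no auto-populated UUID4 fields, so the
+    # equality assertion below simply confirms the request passes through
+    # unchanged.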
+ with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Subscription( + name="name_value", + topic="topic_value", + filter="filter_value", + ) + + +def test_create_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_subscription + ] = mock_rpc + request = {} + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_subscription + ] = mock_rpc + + request = {} + await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.Subscription +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
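+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the
+        # mocked stub returns an awaitable call object, mirroring what a real
+        # async gRPC stub would hand back.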
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + response = await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.Subscription() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.asyncio +async def test_create_subscription_async_from_dict(): + await test_create_subscription_async(request_type=dict) + + +def test_create_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_create_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
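+        # "Flattened" calls pass individual fields as keyword arguments and
+        # let the client assemble the request message; the assertions below
+        # check that each field landed on the outgoing request.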
+        call.return_value = pubsub.Subscription()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_subscription(
+            name="name_value",
+            topic="topic_value",
+            push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"),
+            ack_deadline_seconds=2066,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+        arg = args[0].push_config
+        mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value")
+        assert arg == mock_val
+        arg = args[0].ack_deadline_seconds
+        mock_val = 2066
+        assert arg == mock_val
+
+
+def test_create_subscription_flattened_error():
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_subscription(
+            pubsub.Subscription(),
+            name="name_value",
+            topic="topic_value",
+            push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"),
+            ack_deadline_seconds=2066,
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_subscription_flattened_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_subscription(
+            name="name_value",
+            topic="topic_value",
+            push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"),
+            ack_deadline_seconds=2066,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+        arg = args[0].topic
+        mock_val = "topic_value"
+        assert arg == mock_val
+        arg = args[0].push_config
+        mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value")
+        assert arg == mock_val
+        arg = args[0].ack_deadline_seconds
+        mock_val = 2066
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_create_subscription_flattened_error_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.create_subscription( + pubsub.Subscription(), + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSubscriptionRequest, + dict, + ], +) +def test_get_subscription(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + response = client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.GetSubscriptionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_get_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSubscriptionRequest( + subscription="subscription_value", + ) + + +def test_get_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_subscription in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_subscription + ] = mock_rpc + request = {} + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_subscription + ] = mock_rpc + + request = {} + await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + response = await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.GetSubscriptionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.asyncio +async def test_get_subscription_async_from_dict(): + await test_get_subscription_async(request_type=dict) + + +def test_get_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value = pubsub.Subscription() + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_get_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. 
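+        # Only the outgoing request is asserted on below, so an empty
+        # Subscription is a sufficient fake response.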
+        call.return_value = pubsub.Subscription()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_subscription(
+            subscription="subscription_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = "subscription_value"
+        assert arg == mock_val
+
+
+def test_get_subscription_flattened_error():
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_subscription(
+            pubsub.GetSubscriptionRequest(),
+            subscription="subscription_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_subscription_flattened_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_subscription), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_subscription(
+            subscription="subscription_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = "subscription_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_get_subscription_flattened_error_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_subscription(
+            pubsub.GetSubscriptionRequest(),
+            subscription="subscription_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        pubsub.UpdateSubscriptionRequest,
+        dict,
+    ],
+)
+def test_update_subscription(request_type, transport: str = "grpc"):
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.Subscription(
+            name="name_value",
+            topic="topic_value",
+            ack_deadline_seconds=2066,
+            retain_acked_messages=True,
+            enable_message_ordering=True,
+            filter="filter_value",
+            detached=True,
+            enable_exactly_once_delivery=True,
+            state=pubsub.Subscription.State.ACTIVE,
+        )
+        response = client.update_subscription(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = pubsub.UpdateSubscriptionRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
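+    # Each field designated on the mocked return value should round-trip
+    # to the proto-plus response unchanged.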
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_update_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.UpdateSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSubscriptionRequest() + + +def test_update_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_subscription + ] = mock_rpc + request = {} + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
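+        # A second invocation below must reuse the wrapped RPC cached at
+        # client construction rather than wrapping the method again.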
+ assert mock_rpc.call_count == 1 + + client.update_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_subscription + ] = mock_rpc + + request = {} + await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + response = await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.UpdateSubscriptionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.asyncio +async def test_update_subscription_async_from_dict(): + await test_update_subscription_async(request_type=dict) + + +def test_update_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + + request.subscription.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + + request.subscription.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription.name=name_value", + ) in kw["metadata"] + + +def test_update_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_subscription( + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
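+        # Flattened keyword arguments are coerced into the request proto,
+        # so the assertions compare the coerced field values.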
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = pubsub.Subscription(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_subscription_flattened_error():
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_subscription(
+            pubsub.UpdateSubscriptionRequest(),
+            subscription=pubsub.Subscription(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_subscription_flattened_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_subscription(
+            subscription=pubsub.Subscription(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = pubsub.Subscription(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_update_subscription_flattened_error_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.update_subscription(
+            pubsub.UpdateSubscriptionRequest(),
+            subscription=pubsub.Subscription(name="name_value"),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        pubsub.ListSubscriptionsRequest,
+        dict,
+    ],
+)
+def test_list_subscriptions(request_type, transport: str = "grpc"):
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.ListSubscriptionsResponse(
+            next_page_token="next_page_token_value",
+        )
+        response = client.list_subscriptions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = pubsub.ListSubscriptionsRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListSubscriptionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_subscriptions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListSubscriptionsRequest( + project="project_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_subscriptions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSubscriptionsRequest( + project="project_value", + page_token="page_token_value", + ) + + +def test_list_subscriptions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_subscriptions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_subscriptions + ] = mock_rpc + request = {} + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_subscriptions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_subscriptions + ] = mock_rpc + + request = {} + await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_subscriptions_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.ListSubscriptionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_from_dict(): + await test_list_subscriptions_async(request_type=dict) + + +def test_list_subscriptions_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSubscriptionsRequest() + + request.project = "project_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListSubscriptionsResponse() + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project=project_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_subscriptions_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSubscriptionsRequest() + + request.project = "project_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse() + ) + await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "project=project_value",
+    ) in kw["metadata"]
+
+
+def test_list_subscriptions_flattened():
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = pubsub.ListSubscriptionsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_subscriptions(
+            project="project_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project
+        mock_val = "project_value"
+        assert arg == mock_val
+
+
+def test_list_subscriptions_flattened_error():
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_subscriptions(
+            pubsub.ListSubscriptionsRequest(),
+            project="project_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_subscriptions_flattened_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            pubsub.ListSubscriptionsResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_subscriptions(
+            project="project_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project
+        mock_val = "project_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_subscriptions_flattened_error_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_subscriptions(
+            pubsub.ListSubscriptionsRequest(),
+            project="project_value",
+        )
+
+
+def test_list_subscriptions_pager(transport_name: str = "grpc"):
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_subscriptions), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
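+        # Three pages chained by next_page_token, then a final page with no
+        # token; the trailing RuntimeError fires only if the pager overruns.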
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), + ) + pager = client.list_subscriptions(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in results) + + +def test_list_subscriptions_pages(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + pages = list(client.list_subscriptions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pager(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
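+        # Same four fake pages as the sync pager test; the empty final
+        # token tells the async pager to stop before the RuntimeError.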
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_subscriptions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in responses) + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pages(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_subscriptions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSubscriptionRequest, + dict, + ], +) +def test_delete_subscription(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.DeleteSubscriptionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
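+    # (AIP-4235 covers UUID4-style request IDs that the client fills in
+    # automatically when the caller leaves them unset.)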
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + +def test_delete_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_subscription + ] = mock_rpc + request = {} + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_subscription + ] = mock_rpc + + request = {} + await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.DeleteSubscriptionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_subscription_async_from_dict(): + await test_delete_subscription_async(request_type=dict) + + +def test_delete_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSubscriptionRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + call.return_value = None + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSubscriptionRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_delete_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
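+    # Patching the transport method's __call__ intercepts the request
+    # before any channel I/O can happen.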
+    with mock.patch.object(
+        type(client.transport.delete_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_subscription(
+            subscription="subscription_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = "subscription_value"
+        assert arg == mock_val
+
+
+def test_delete_subscription_flattened_error():
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_subscription(
+            pubsub.DeleteSubscriptionRequest(),
+            subscription="subscription_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_subscription_flattened_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_subscription), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_subscription(
+            subscription="subscription_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = "subscription_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_subscription_flattened_error_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_subscription(
+            pubsub.DeleteSubscriptionRequest(),
+            subscription="subscription_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        pubsub.ModifyAckDeadlineRequest,
+        dict,
+    ],
+)
+def test_modify_ack_deadline(request_type, transport: str = "grpc"):
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.modify_ack_deadline), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.modify_ack_deadline(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        request = pubsub.ModifyAckDeadlineRequest()
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+ assert response is None + + +def test_modify_ack_deadline_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ModifyAckDeadlineRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.modify_ack_deadline(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyAckDeadlineRequest( + subscription="subscription_value", + ) + + +def test_modify_ack_deadline_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_ack_deadline in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_ack_deadline + ] = mock_rpc + request = {} + client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.modify_ack_deadline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.modify_ack_deadline + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_ack_deadline + ] = mock_rpc + + request = {} + await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.modify_ack_deadline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_async( + transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.ModifyAckDeadlineRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_async_from_dict(): + await test_modify_ack_deadline_async(request_type=dict) + + +def test_modify_ack_deadline_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyAckDeadlineRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + call.return_value = None + client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyAckDeadlineRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_modify_ack_deadline_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.modify_ack_deadline), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.modify_ack_deadline(
+            subscription="subscription_value",
+            ack_ids=["ack_ids_value"],
+            ack_deadline_seconds=2066,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = "subscription_value"
+        assert arg == mock_val
+        arg = args[0].ack_ids
+        mock_val = ["ack_ids_value"]
+        assert arg == mock_val
+        arg = args[0].ack_deadline_seconds
+        mock_val = 2066
+        assert arg == mock_val
+
+
+def test_modify_ack_deadline_flattened_error():
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.modify_ack_deadline(
+            pubsub.ModifyAckDeadlineRequest(),
+            subscription="subscription_value",
+            ack_ids=["ack_ids_value"],
+            ack_deadline_seconds=2066,
+        )
+
+
+@pytest.mark.asyncio
+async def test_modify_ack_deadline_flattened_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.modify_ack_deadline), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.modify_ack_deadline(
+            subscription="subscription_value",
+            ack_ids=["ack_ids_value"],
+            ack_deadline_seconds=2066,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].subscription
+        mock_val = "subscription_value"
+        assert arg == mock_val
+        arg = args[0].ack_ids
+        mock_val = ["ack_ids_value"]
+        assert arg == mock_val
+        arg = args[0].ack_deadline_seconds
+        mock_val = 2066
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_modify_ack_deadline_flattened_error_async():
+    client = SubscriberAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.modify_ack_deadline(
+            pubsub.ModifyAckDeadlineRequest(),
+            subscription="subscription_value",
+            ack_ids=["ack_ids_value"],
+            ack_deadline_seconds=2066,
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        pubsub.AcknowledgeRequest,
+        dict,
+    ],
+)
+def test_acknowledge(request_type, transport: str = "grpc"):
+    client = SubscriberClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.acknowledge), "__call__") as call:
+        # Designate an appropriate return value for the call.
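+        # Acknowledge returns google.protobuf.Empty on the wire, which the
+        # client surfaces as None.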
+ call.return_value = None + response = client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.AcknowledgeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_acknowledge_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.AcknowledgeRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.acknowledge(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.AcknowledgeRequest( + subscription="subscription_value", + ) + + +def test_acknowledge_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.acknowledge in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.acknowledge] = mock_rpc + request = {} + client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.acknowledge(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_acknowledge_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.acknowledge + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.acknowledge + ] = mock_rpc + + request = {} + await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.acknowledge(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_acknowledge_async( + transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.AcknowledgeRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_acknowledge_async_from_dict(): + await test_acknowledge_async(request_type=dict) + + +def test_acknowledge_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.AcknowledgeRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + call.return_value = None + client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
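+ # GAPIC clients mirror URI-bound fields into the ``x-goog-request-params``
+ # metadata entry so the backend can route the call without inspecting
+ # the request payload.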
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_acknowledge_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.AcknowledgeRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_acknowledge_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.acknowledge( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ["ack_ids_value"] + assert arg == mock_val + + +def test_acknowledge_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + +@pytest.mark.asyncio +async def test_acknowledge_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.acknowledge( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
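+ # The flattened keyword arguments should have been folded into a single
+ # AcknowledgeRequest before the transport was invoked.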
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ["ack_ids_value"] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_acknowledge_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PullRequest, + dict, + ], +) +def test_pull(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + response = client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.PullRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PullResponse) + + +def test_pull_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.PullRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.pull(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PullRequest( + subscription="subscription_value", + ) + + +def test_pull_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.pull in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.pull] = mock_rpc + request = {} + client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_pull_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.pull in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.pull + ] = mock_rpc + + request = {} + await client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_pull_async( + transport: str = "grpc_asyncio", request_type=pubsub.PullRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + response = await client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.PullRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PullResponse) + + +@pytest.mark.asyncio +async def test_pull_async_from_dict(): + await test_pull_async(request_type=dict) + + +def test_pull_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PullRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + call.return_value = pubsub.PullResponse() + client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_pull_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PullRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + await client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_pull_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + client.pull( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].return_immediately + mock_val = True + assert arg == mock_val + arg = args[0].max_messages + mock_val = 1277 + assert arg == mock_val + + +def test_pull_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pull( + pubsub.PullRequest(), + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + +@pytest.mark.asyncio +async def test_pull_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + await client.pull( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
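+ # ``return_immediately`` is deprecated on PullRequest (hence the
+ # DeprecationWarning filter above), but it must still round-trip into
+ # the request object.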
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].return_immediately + mock_val = True + assert arg == mock_val + arg = args[0].max_messages + mock_val = 1277 + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_pull_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pull( + pubsub.PullRequest(), + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.StreamingPullRequest, + dict, + ], +) +def test_streaming_pull(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.streaming_pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = iter([pubsub.StreamingPullResponse()]) + response = client.streaming_pull(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, pubsub.StreamingPullResponse) + + +def test_streaming_pull_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.streaming_pull in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.streaming_pull] = mock_rpc + request = [{}] + client.streaming_pull(request) + + # Establish that the underlying gRPC stub method was called. 
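+ # streaming_pull is a bidirectional stream, so the request above is an
+ # iterable of request dicts rather than a single message.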
+ assert mock_rpc.call_count == 1 + + client.streaming_pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_streaming_pull_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.streaming_pull + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.streaming_pull + ] = mock_rpc + + request = [{}] + await client.streaming_pull(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.streaming_pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_streaming_pull_async( + transport: str = "grpc_asyncio", request_type=pubsub.StreamingPullRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.streaming_pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[pubsub.StreamingPullResponse()] + ) + response = await client.streaming_pull(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, pubsub.StreamingPullResponse) + + +@pytest.mark.asyncio +async def test_streaming_pull_async_from_dict(): + await test_streaming_pull_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyPushConfigRequest, + dict, + ], +) +def test_modify_push_config(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.ModifyPushConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_push_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.modify_push_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + +def test_modify_push_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_push_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_push_config + ] = mock_rpc + request = {} + client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.modify_push_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_push_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.modify_push_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_push_config + ] = mock_rpc + + request = {} + await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.modify_push_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_modify_push_config_async( + transport: str = "grpc_asyncio", request_type=pubsub.ModifyPushConfigRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.ModifyPushConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_modify_push_config_async_from_dict(): + await test_modify_push_config_async(request_type=dict) + + +def test_modify_push_config_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyPushConfigRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + call.return_value = None + client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_modify_push_config_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyPushConfigRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_modify_push_config_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.modify_push_config( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value") + assert arg == mock_val + + +def test_modify_push_config_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.modify_push_config( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSnapshotRequest, + dict, + ], +) +def test_get_snapshot(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + response = client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.GetSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +def test_get_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + +def test_get_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc + request = {} + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_snapshot + ] = mock_rpc + + request = {} + await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + response = await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
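+ # FakeUnaryUnaryCall stands in for the grpc.aio call object here;
+ # awaiting it yields the wrapped Snapshot, as the assertions below
+ # confirm.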
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.GetSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.asyncio +async def test_get_snapshot_async_from_dict(): + await test_get_snapshot_async(request_type=dict) + + +def test_get_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + + request.snapshot = "snapshot_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "snapshot=snapshot_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + + request.snapshot = "snapshot_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "snapshot=snapshot_value", + ) in kw["metadata"] + + +def test_get_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_snapshot( + snapshot="snapshot_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val + + +def test_get_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
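+ # The request object and the flattened keyword arguments are mutually
+ # exclusive ways to populate the request.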
+ with pytest.raises(ValueError): + client.get_snapshot( + pubsub.GetSnapshotRequest(), + snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_snapshot( + snapshot="snapshot_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_snapshot( + pubsub.GetSnapshotRequest(), + snapshot="snapshot_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSnapshotsRequest, + dict, + ], +) +def test_list_snapshots(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.ListSnapshotsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_snapshots_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListSnapshotsRequest( + project="project_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_snapshots(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSnapshotsRequest( + project="project_value", + page_token="page_token_value", + ) + + +def test_list_snapshots_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_snapshots in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc + request = {} + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_snapshots_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_snapshots + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_snapshots + ] = mock_rpc + + request = {} + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_snapshots_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
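+ # The async client wraps the raw response in a ListSnapshotsAsyncPager
+ # so callers can iterate snapshots across page boundaries.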
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.ListSnapshotsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_snapshots_async_from_dict(): + await test_list_snapshots_async(request_type=dict) + + +def test_list_snapshots_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + + request.project = "project_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value = pubsub.ListSnapshotsResponse() + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project=project_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_snapshots_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + + request.project = "project_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse() + ) + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "project=project_value", + ) in kw["metadata"] + + +def test_list_snapshots_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_snapshots( + project="project_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val + + +def test_list_snapshots_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_snapshots( + pubsub.ListSnapshotsRequest(), + project="project_value", + ) + + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_snapshots( + project="project_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_snapshots( + pubsub.ListSnapshotsRequest(), + project="project_value", + ) + + +def test_list_snapshots_pager(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), + ) + pager = client.list_snapshots(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Snapshot) for i in results) + + +def test_list_snapshots_pages(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Set the response to a series of pages. 
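+ # Each stub invocation consumes the next element of ``side_effect``;
+ # the trailing RuntimeError guards against the pager fetching more
+ # pages than the fixture provides.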
+ call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + pages = list(client.list_snapshots(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_snapshots_async_pager(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_snapshots( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Snapshot) for i in responses) + + +@pytest.mark.asyncio +async def test_list_snapshots_async_pages(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_snapshots(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.CreateSnapshotRequest, + dict, + ], +) +def test_create_snapshot(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + response = client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.CreateSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +def test_create_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + +def test_create_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + request = {} + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
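+        # The mock was installed in _wrapped_methods above, so exactly one
+        # call here shows the client dispatched through the cache entry
+        # instead of re-wrapping the RPC.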
+ assert mock_rpc.call_count == 1 + + client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_snapshot + ] = mock_rpc + + request = {} + await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + response = await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.CreateSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.asyncio +async def test_create_snapshot_async_from_dict(): + await test_create_snapshot_async(request_type=dict) + + +def test_create_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
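+        # A single stub invocation is expected, carrying the request verbatim
+        # plus (checked below) routing metadata derived from request.name.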
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_create_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_snapshot( + name="name_value", + subscription="subscription_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + + +def test_create_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_snapshot( + name="name_value", + subscription="subscription_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
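+        # The flattened name/subscription kwargs must have been folded into
+        # the single request proto handed to the stub.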
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSnapshotRequest, + dict, + ], +) +def test_update_snapshot(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + response = client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.UpdateSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +def test_update_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.UpdateSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSnapshotRequest() + + +def test_update_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_snapshot] = mock_rpc + request = {} + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_snapshot + ] = mock_rpc + + request = {} + await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + response = await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
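+        # FakeUnaryUnaryCall stands in for the awaited gRPC call, resolving
+        # to the canned Snapshot; the stub must still see the request itself.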
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.UpdateSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.asyncio +async def test_update_snapshot_async_from_dict(): + await test_update_snapshot_async(request_type=dict) + + +def test_update_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + + request.snapshot.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "snapshot.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + + request.snapshot.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "snapshot.name=name_value", + ) in kw["metadata"] + + +def test_update_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_snapshot( + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = pubsub.Snapshot(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_snapshot( + pubsub.UpdateSnapshotRequest(), + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_snapshot( + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = pubsub.Snapshot(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_snapshot( + pubsub.UpdateSnapshotRequest(), + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSnapshotRequest, + dict, + ], +) +def test_delete_snapshot(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.DeleteSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
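+    # DeleteSnapshot returns google.protobuf.Empty on the wire, which the
+    # generated client surfaces as None.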
+ assert response is None + + +def test_delete_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + +def test_delete_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc + request = {} + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_snapshot + ] = mock_rpc + + request = {} + await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
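+        # As in the sync variant, one call against the injected mock proves
+        # the cached wrapper was reused rather than rebuilt.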
+ assert mock_rpc.call_count == 1 + + await client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.DeleteSnapshotRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_snapshot_async_from_dict(): + await test_delete_snapshot_async(request_type=dict) + + +def test_delete_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + + request.snapshot = "snapshot_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value = None + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "snapshot=snapshot_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + + request.snapshot = "snapshot_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "snapshot=snapshot_value", + ) in kw["metadata"] + + +def test_delete_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_snapshot( + snapshot="snapshot_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val + + +def test_delete_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), + snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_snapshot( + snapshot="snapshot_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), + snapshot="snapshot_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.SeekRequest, + dict, + ], +) +def test_seek(request_type, transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.SeekResponse() + response = client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = pubsub.SeekRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.SeekResponse) + + +def test_seek_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
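+    # Only the non-UUID4 string fields are set explicitly below, so the
+    # equality check at the end would surface any unexpected values the
+    # client filled in automatically per AIP-4235.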
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.SeekRequest( + subscription="subscription_value", + snapshot="snapshot_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.seek(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.SeekRequest( + subscription="subscription_value", + snapshot="snapshot_value", + ) + + +def test_seek_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.seek in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.seek] = mock_rpc + request = {} + client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.seek(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_seek_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.seek in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.seek + ] = mock_rpc + + request = {} + await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.seek(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_seek_async( + transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest +): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
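+    # request_type defaults to pubsub.SeekRequest; test_seek_async_from_dict
+    # below re-runs this body with a plain dict request.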
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + response = await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = pubsub.SeekRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.SeekResponse) + + +@pytest.mark.asyncio +async def test_seek_async_from_dict(): + await test_seek_async(request_type=dict) + + +def test_seek_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + call.return_value = pubsub.SeekResponse() + client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_seek_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + + request.subscription = "subscription_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription=subscription_value", + ) in kw["metadata"] + + +def test_create_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.create_subscription + ] = mock_rpc + + request = {} + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_subscription_rest_required_fields(request_type=pubsub.Subscription): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["name"] = "" + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
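+            # transcode() normally maps the request onto the http rule;
+            # stubbing it with a fixed uri/method/body keeps this test
+            # independent of the real routing configuration.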
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_subscription._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "topic", + ) + ) + ) + + +def test_create_subscription_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/subscriptions/*}" % client.transport._host, args[1] + ) + + +def test_create_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_subscription( + pubsub.Subscription(), + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + +def test_get_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_subscription in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_subscription + ] = mock_rpc + + request = {} + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_subscription_rest_required_fields( + request_type=pubsub.GetSubscriptionRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
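+            # As above, the stubbed transcode() pins the uri and method so
+            # the required "subscription" field surfaces in query_params.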
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_subscription._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subscription",))) + + +def test_get_subscription_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}" % client.transport._host, + args[1], + ) + + +def test_get_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_subscription( + pubsub.GetSubscriptionRequest(), + subscription="subscription_value", + ) + + +def test_update_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_subscription + ] = mock_rpc + + request = {} + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_subscription_rest_required_fields( + request_type=pubsub.UpdateSubscriptionRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
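+            # The PATCH variant also sends a request body, so the stubbed
+            # transcode result below includes "body" alongside uri and method.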
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_subscription._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "updateMask", + ) + ) + ) + + +def test_update_subscription_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + + # get arguments that satisfy an http rule for this method + sample_request = { + "subscription": {"name": "projects/sample1/subscriptions/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription.name=projects/*/subscriptions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_subscription( + pubsub.UpdateSubscriptionRequest(), + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_subscriptions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_subscriptions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_subscriptions + ] = mock_rpc + + request = {} + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_subscriptions_rest_required_fields( + request_type=pubsub.ListSubscriptionsRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_subscriptions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_subscriptions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSubscriptionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
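+            # ListSubscriptions maps to an HTTP GET, so the stubbed transcode
+            # result carries no "body" key; every request field must surface
+            # as a query parameter.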
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_subscriptions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_subscriptions_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_subscriptions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("project",)) + ) + + +def test_list_subscriptions_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSubscriptionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_subscriptions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{project=projects/*}/subscriptions" % client.transport._host, args[1] + ) + + +def test_list_subscriptions_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), + project="project_value", + ) + + +def test_list_subscriptions_rest_pager(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
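+        # The pager fixture below stages four pages (3, 0, 1 and 2
+        # subscriptions) chained by next_page_token values "abc", "def" and
+        # "ghi"; the last page leaves the token empty to end iteration.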
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(pubsub.ListSubscriptionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "projects/sample1"} + + pager = client.list_subscriptions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in results) + + pages = list(client.list_subscriptions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_subscription + ] = mock_rpc + + request = {} + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
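+        # (The "gRPC stub" wording above comes from the shared test template;
+        # on this REST transport the mock stands in for the cached wrapped
+        # method.) The cache being exercised is built once at client creation
+        # by _prep_wrapped_messages, roughly as follows (a sketch, not the
+        # verbatim generated code):
+        #
+        #     self._wrapped_methods = {
+        #         self.delete_subscription: gapic_v1.method.wrap_method(
+        #             self.delete_subscription,
+        #             default_timeout=None,  # real defaults vary per RPC
+        #             client_info=client_info,
+        #         ),
+        #         ...  # one entry per RPC on the transport
+        #     }
+        #
+        # so each client call is a dict lookup keyed by the bound transport
+        # method rather than a fresh wrap_method() call.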
+ assert mock_rpc.call_count == 1 + + client.delete_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_subscription_rest_required_fields( + request_type=pubsub.DeleteSubscriptionRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_subscription._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subscription",))) + + +def test_delete_subscription_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
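+    # DeleteSubscription returns google.protobuf.Empty, so there is no
+    # payload to fake; the mocked HTTP response body below stays empty.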
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}" % client.transport._host, + args[1], + ) + + +def test_delete_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_subscription( + pubsub.DeleteSubscriptionRequest(), + subscription="subscription_value", + ) + + +def test_modify_ack_deadline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_ack_deadline in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_ack_deadline + ] = mock_rpc + + request = {} + client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.modify_ack_deadline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_modify_ack_deadline_rest_required_fields( + request_type=pubsub.ModifyAckDeadlineRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request_init["ack_ids"] = "" + request_init["ack_deadline_seconds"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_ack_deadline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + jsonified_request["ackIds"] = "ack_ids_value" + jsonified_request["ackDeadlineSeconds"] = 2066 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_ack_deadline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + assert "ackIds" in jsonified_request + assert jsonified_request["ackIds"] == "ack_ids_value" + assert "ackDeadlineSeconds" in jsonified_request + assert jsonified_request["ackDeadlineSeconds"] == 2066 + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
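+            # ModifyAckDeadline maps to an HTTP POST with the request
+            # serialized into the body; like DeleteSubscription it returns
+            # Empty, so the faked response payload is an empty string.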
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.modify_ack_deadline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_modify_ack_deadline_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.modify_ack_deadline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "ackIds", + "ackDeadlineSeconds", + ) + ) + ) + + +def test_modify_ack_deadline_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.modify_ack_deadline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline" + % client.transport._host, + args[1], + ) + + +def test_modify_ack_deadline_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.modify_ack_deadline( + pubsub.ModifyAckDeadlineRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + +def test_acknowledge_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.acknowledge in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.acknowledge] = mock_rpc + + request = {} + client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.acknowledge(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_acknowledge_rest_required_fields(request_type=pubsub.AcknowledgeRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request_init["ack_ids"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).acknowledge._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + jsonified_request["ackIds"] = "ack_ids_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).acknowledge._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + assert "ackIds" in jsonified_request + assert jsonified_request["ackIds"] == "ack_ids_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.acknowledge(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_acknowledge_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.acknowledge._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "ackIds", + ) + ) + ) + + +def test_acknowledge_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.acknowledge(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:acknowledge" + % client.transport._host, + args[1], + ) + + +def test_acknowledge_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + +def test_pull_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.pull in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.pull] = mock_rpc + + request = {} + client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_pull_rest_required_fields(request_type=pubsub.PullRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request_init["max_messages"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pull._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + jsonified_request["maxMessages"] = 1277 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pull._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + assert "maxMessages" in jsonified_request + assert jsonified_request["maxMessages"] == 1277 + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.PullResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
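+            # Pull is also a POST carrying a body, but unlike Acknowledge it
+            # has a real response message, so the fake response below is a
+            # serialized pubsub.PullResponse rather than an empty payload.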
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.pull(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pull_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pull._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "maxMessages", + ) + ) + ) + + +def test_pull_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.PullResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.pull(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:pull" + % client.transport._host, + args[1], + ) + + +def test_pull_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
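+    # (return_immediately is a deprecated PullRequest field; it appears
+    # here only because the test supplies a truthy value for every
+    # flattened argument.)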
+ with pytest.raises(ValueError): + client.pull( + pubsub.PullRequest(), + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + +def test_streaming_pull_rest_no_http_options(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = pubsub.StreamingPullRequest() + requests = [request] + with pytest.raises(RuntimeError): + client.streaming_pull(requests) + + +def test_modify_push_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_push_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_push_config + ] = mock_rpc + + request = {} + client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.modify_push_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_modify_push_config_rest_required_fields( + request_type=pubsub.ModifyPushConfigRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_push_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_push_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.modify_push_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_modify_push_config_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.modify_push_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "pushConfig", + ) + ) + ) + + +def test_modify_push_config_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.modify_push_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig" + % client.transport._host, + args[1], + ) + + +def test_modify_push_config_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +def test_get_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc + + request = {} + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_snapshot_rest_required_fields(request_type=pubsub.GetSnapshotRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("snapshot",))) + + +def test_get_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = {"snapshot": "projects/sample1/snapshots/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + snapshot="snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{snapshot=projects/*/snapshots/*}" % client.transport._host, args[1] + ) + + +def test_get_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_snapshot( + pubsub.GetSnapshotRequest(), + snapshot="snapshot_value", + ) + + +def test_list_snapshots_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_snapshots in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc + + request = {} + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_snapshots_rest_required_fields(request_type=pubsub.ListSnapshotsRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_snapshots._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_snapshots._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSnapshotsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_snapshots(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_snapshots_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_snapshots._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("project",)) + ) + + +def test_list_snapshots_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSnapshotsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_snapshots(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{project=projects/*}/snapshots" % client.transport._host, args[1] + ) + + +def test_list_snapshots_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_snapshots( + pubsub.ListSnapshotsRequest(), + project="project_value", + ) + + +def test_list_snapshots_rest_pager(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(pubsub.ListSnapshotsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "projects/sample1"} + + pager = client.list_snapshots(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Snapshot) for i in results) + + pages = list(client.list_snapshots(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + + request = {} + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_snapshot_rest_required_fields( + request_type=pubsub.CreateSnapshotRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["name"] = "" + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
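+            # Unlike the other verbs exercised above, CreateSnapshot maps to
+            # an HTTP PUT on the snapshot name, hence "put" in the stub below.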
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "subscription", + ) + ) + ) + + +def test_create_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/snapshots/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + subscription="subscription_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/snapshots/*}" % client.transport._host, args[1] + ) + + +def test_create_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +def test_update_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_snapshot] = mock_rpc + + request = {} + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_snapshot_rest_required_fields( + request_type=pubsub.UpdateSnapshotRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "snapshot", + "updateMask", + ) + ) + ) + + +def test_update_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{snapshot.name=projects/*/snapshots/*}" % client.transport._host, + args[1], + ) + + +def test_update_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_snapshot( + pubsub.UpdateSnapshotRequest(), + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc + + request = {} + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_snapshot_rest_required_fields( + request_type=pubsub.DeleteSnapshotRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("snapshot",))) + + +def test_delete_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"snapshot": "projects/sample1/snapshots/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + snapshot="snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{snapshot=projects/*/snapshots/*}" % client.transport._host, args[1] + ) + + +def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), + snapshot="snapshot_value", + ) + + +def test_seek_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.seek in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.seek] = mock_rpc + + request = {} + client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.seek(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_seek_rest_required_fields(request_type=pubsub.SeekRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).seek._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).seek._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.SeekResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.SeekResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.seek(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_seek_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.seek._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subscription",))) + + +def test_streaming_pull_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.streaming_pull({}) + assert "Method StreamingPull is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
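Before the concrete test, a rough sketch of the rule that the preceding credential-conflict cases and this one pin down: a pre-built transport already carries its own credentials and options, so competing settings are rejected up front. `ClientSketch` here is hypothetical, not the generated constructor:

```python
# Hypothetical ClientSketch, not the generated constructor; it only
# illustrates the conflict rule the surrounding tests assert.
class ClientSketch:
    def __init__(self, transport=None, credentials=None, client_options=None):
        opts = dict(client_options or {})
        conflicting = (
            credentials is not None
            or opts.get("credentials_file")
            or opts.get("scopes")
            or opts.get("api_key")
        )
        if transport is not None and conflicting:
            raise ValueError(
                "transport already carries credentials; do not pass both"
            )
        self.transport = transport

client = ClientSketch(transport="prebuilt-transport")
assert client.transport == "prebuilt-transport"
try:
    ClientSketch(transport="prebuilt-transport", credentials="creds")
except ValueError as exc:
    print(exc)
```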
+ transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SubscriberClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SubscriberGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + transports.SubscriberRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = SubscriberClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + client.create_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Subscription() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value = pubsub.Subscription() + client.get_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + client.update_subscription(request=None) + + # Establish that the underlying stub method was called. 
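The assertion pattern that follows here and in the sibling tests unpacks entries of `Mock.mock_calls`; each entry is a `(name, args, kwargs)` triple, as this self-contained snippet shows:

```python
from unittest import mock

stub = mock.Mock()
stub("request-object", timeout=5.0)

# Each mock_calls entry unpacks into (name, args, kwargs); the tests
# keep only the positional args, hence `_, args, _ = ...`.
name, args, kwargs = stub.mock_calls[0]
assert args == ("request-object",)
assert kwargs == {"timeout": 5.0}
```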
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_subscriptions_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListSubscriptionsResponse() + client.list_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + call.return_value = None + client.delete_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_modify_ack_deadline_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + call.return_value = None + client.modify_ack_deadline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyAckDeadlineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_acknowledge_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + call.return_value = None + client.acknowledge(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.AcknowledgeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_pull_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + call.return_value = pubsub.PullResponse() + client.pull(request=None) + + # Establish that the underlying stub method was called. 
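The final comparison `args[0] == request_msg` in these empty-call tests works because passing `request=None` makes the client build a default request message, and protobuf messages compare equal field by field. A quick demonstration with a well-known type that is already used elsewhere in this file:

```python
# Default-constructed protobuf messages compare equal field by field,
# which is what makes `args[0] == request_msg` meaningful.
from google.protobuf import field_mask_pb2

assert field_mask_pb2.FieldMask() == field_mask_pb2.FieldMask()
assert field_mask_pb2.FieldMask(paths=["a"]) != field_mask_pb2.FieldMask()
```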
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PullRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_modify_push_config_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + call.return_value = None + client.modify_push_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyPushConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_snapshot_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + client.get_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_snapshots_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value = pubsub.ListSnapshotsResponse() + client.list_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_snapshot_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + client.create_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.CreateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_snapshot_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + client.update_snapshot(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_snapshot_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value = None + client.delete_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_seek_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + call.return_value = pubsub.SeekResponse() + client.seek(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.SeekRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SubscriberAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_subscription_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + await client.create_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Subscription() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_subscription_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. 
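The return value designated next must be awaitable, because the async client awaits the stub call; `grpc_helpers_async.FakeUnaryUnaryCall` provides exactly that. A hand-rolled stand-in (a sketch, not the helper's actual implementation) makes the requirement concrete:

```python
import asyncio

# Hand-rolled stand-in for an awaitable gRPC call; a sketch only, not
# how grpc_helpers_async.FakeUnaryUnaryCall is implemented.
class AwaitableCallSketch:
    def __init__(self, response):
        self._response = response

    def __await__(self):
        async def _resolve():
            return self._response
        return _resolve().__await__()

async def main():
    fake_call = AwaitableCallSketch("subscription-response")
    assert await fake_call == "subscription-response"

asyncio.run(main())
```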
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + await client.get_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_subscription_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + await client.update_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_subscriptions_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_subscription_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_subscription(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_modify_ack_deadline_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_ack_deadline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyAckDeadlineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_acknowledge_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.acknowledge(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.AcknowledgeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_pull_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + await client.pull(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PullRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_modify_push_config_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_push_config(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyPushConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + await client.get_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_snapshots_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + await client.create_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.CreateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + await client.update_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_seek_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + await client.seek(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.SeekRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SubscriberClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_subscription_rest_bad_request(request_type=pubsub.Subscription): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
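Conceptually, the REST transport turns non-2xx statuses into typed `google.api_core` exceptions, which is why a mocked 400 response is enough to surface `core_exceptions.BadRequest` below. A toy version of that status-to-exception mapping, with stand-in exception classes:

```python
# Toy status-to-exception mapping with stand-in exception classes;
# the real mapping lives in google.api_core.exceptions.
class BadRequestSketch(Exception):
    pass

def raise_for_status_sketch(status_code: int) -> None:
    if status_code == 400:
        raise BadRequestSketch("400 Bad Request")
    if status_code >= 400:
        raise RuntimeError(f"HTTP error {status_code}")

try:
    raise_for_status_sketch(400)
except BadRequestSketch as exc:
    print(exc)  # analogous to the core_exceptions.BadRequest below
```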
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Subscription, + dict, + ], +) +def test_create_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_subscription(request) + + # Establish that the response is the type that we expect. 
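The assertions that follow hold because the mocked body above is just protobuf JSON, which the transport parses back into a typed message. The same round trip, shown with a well-known type so the snippet stays self-contained:

```python
# Round trip with a well-known type; the generated tests do the same
# with pubsub messages via Message.pb() and json_format.MessageToJson.
from google.protobuf import json_format, struct_pb2

original = struct_pb2.Struct()
original.update({"name": "name_value", "detached": True})

payload = json_format.MessageToJson(original)  # stands in for the wire body
restored = json_format.Parse(payload, struct_pb2.Struct())
assert restored == original
```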
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_create_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.Subscription.pb(pubsub.Subscription()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Subscription.to_json(pubsub.Subscription()) + req.return_value.content = return_value + + request = pubsub.Subscription() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + post_with_metadata.return_value = pubsub.Subscription(), metadata + + client.create_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_subscription_rest_bad_request(request_type=pubsub.GetSubscriptionRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
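Stepping back briefly from the BadRequest mock being set up here: the `*_rest_interceptors` tests on either side exercise user-installable hooks. A minimal sketch of such an interceptor, assuming only the `pre_*`/`post_*` hook names these tests already reference, and no other library surface:

```python
# Hedged sketch of a user-side interceptor; the hook names mirror the
# pre_*/post_* methods patched in the interceptor tests above.
class LoggingInterceptorSketch:
    def pre_get_subscription(self, request, metadata):
        # Runs before the HTTP call; may rewrite request and metadata.
        print("sending", type(request).__name__, "metadata:", metadata)
        return request, metadata

    def post_get_subscription(self, response):
        # Runs after a successful call; may rewrite the response.
        print("received", type(response).__name__)
        return response

hooks = LoggingInterceptorSketch()
req, md = hooks.pre_get_subscription("fake-request", [("key", "val")])
resp = hooks.post_get_subscription("fake-response")
```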
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSubscriptionRequest, + dict, + ], +) +def test_get_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_subscription(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_get_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.GetSubscriptionRequest.pb(pubsub.GetSubscriptionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Subscription.to_json(pubsub.Subscription()) + req.return_value.content = return_value + + request = pubsub.GetSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + post_with_metadata.return_value = pubsub.Subscription(), metadata + + client.get_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_subscription_rest_bad_request( + request_type=pubsub.UpdateSubscriptionRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSubscriptionRequest, + dict, + ], +) +def test_update_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_subscription(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_update_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.UpdateSubscriptionRequest.pb( + pubsub.UpdateSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Subscription.to_json(pubsub.Subscription()) + req.return_value.content = return_value + + request = pubsub.UpdateSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + post_with_metadata.return_value = pubsub.Subscription(), metadata + + client.update_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_subscriptions_rest_bad_request( + request_type=pubsub.ListSubscriptionsRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_subscriptions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSubscriptionsRequest, + dict, + ], +) +def test_list_subscriptions_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_subscriptions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_subscriptions_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_subscriptions" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_subscriptions_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_list_subscriptions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.ListSubscriptionsRequest.pb( + pubsub.ListSubscriptionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.ListSubscriptionsResponse.to_json( + pubsub.ListSubscriptionsResponse() + ) + req.return_value.content = return_value + + request = pubsub.ListSubscriptionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListSubscriptionsResponse() + 
post_with_metadata.return_value = pubsub.ListSubscriptionsResponse(), metadata + + client.list_subscriptions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_subscription_rest_bad_request( + request_type=pubsub.DeleteSubscriptionRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSubscriptionRequest, + dict, + ], +) +def test_delete_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_subscription(request) + + # Establish that the response is the type that we expect. 
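+    # DeleteSubscription returns google.protobuf.Empty on the wire, which the
+    # generated client surfaces as None; the Acknowledge, ModifyAckDeadline and
+    # ModifyPushConfig tests below follow the same pattern.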
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_delete_subscription" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteSubscriptionRequest.pb( + pubsub.DeleteSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = pubsub.DeleteSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_modify_ack_deadline_rest_bad_request( + request_type=pubsub.ModifyAckDeadlineRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.modify_ack_deadline(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyAckDeadlineRequest, + dict, + ], +) +def test_modify_ack_deadline_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.modify_ack_deadline(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_modify_ack_deadline_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_modify_ack_deadline" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.ModifyAckDeadlineRequest.pb( + pubsub.ModifyAckDeadlineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = pubsub.ModifyAckDeadlineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.modify_ack_deadline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_acknowledge_rest_bad_request(request_type=pubsub.AcknowledgeRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.acknowledge(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.AcknowledgeRequest, + dict, + ], +) +def test_acknowledge_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.acknowledge(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_acknowledge_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_acknowledge" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.AcknowledgeRequest.pb(pubsub.AcknowledgeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = pubsub.AcknowledgeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.acknowledge( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_pull_rest_bad_request(request_type=pubsub.PullRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.pull(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PullRequest, + dict, + ], +) +def test_pull_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.PullResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.pull(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.PullResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pull_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_pull" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_pull_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_pull" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.PullRequest.pb(pubsub.PullRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.PullResponse.to_json(pubsub.PullResponse()) + req.return_value.content = return_value + + request = pubsub.PullRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.PullResponse() + post_with_metadata.return_value = pubsub.PullResponse(), metadata + + client.pull( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_streaming_pull_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + client.streaming_pull({}) + assert "Method StreamingPull is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_modify_push_config_rest_bad_request( + request_type=pubsub.ModifyPushConfigRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.modify_push_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyPushConfigRequest, + dict, + ], +) +def test_modify_push_config_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.modify_push_config(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_modify_push_config_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_modify_push_config" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.ModifyPushConfigRequest.pb(pubsub.ModifyPushConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = pubsub.ModifyPushConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.modify_push_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_snapshot_rest_bad_request(request_type=pubsub.GetSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSnapshotRequest, + dict, + ], +) +def test_get_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_get_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.GetSnapshotRequest.pb(pubsub.GetSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) + req.return_value.content = return_value + + request = pubsub.GetSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + post_with_metadata.return_value = pubsub.Snapshot(), metadata + + client.get_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", 
"squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_snapshots_rest_bad_request(request_type=pubsub.ListSnapshotsRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_snapshots(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSnapshotsRequest, + dict, + ], +) +def test_list_snapshots_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_snapshots(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_snapshots_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_snapshots" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_snapshots_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_list_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.ListSnapshotsRequest.pb(pubsub.ListSnapshotsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.ListSnapshotsResponse.to_json( + pubsub.ListSnapshotsResponse() + ) + req.return_value.content = return_value + + request = pubsub.ListSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListSnapshotsResponse() + post_with_metadata.return_value = pubsub.ListSnapshotsResponse(), metadata + + client.list_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_snapshot_rest_bad_request(request_type=pubsub.CreateSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.CreateSnapshotRequest, + dict, + ], +) +def test_create_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
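+        # (pubsub.Snapshot is a proto-plus wrapper; the .pb() call below
+        # converts it to the raw protobuf message that
+        # json_format.MessageToJson can serialize.)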
+ return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_create_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.CreateSnapshotRequest.pb(pubsub.CreateSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) + req.return_value.content = return_value + + request = pubsub.CreateSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + post_with_metadata.return_value = pubsub.Snapshot(), metadata + + client.create_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_snapshot_rest_bad_request(request_type=pubsub.UpdateSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSnapshotRequest, + dict, + ], +) +def test_update_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_update_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.UpdateSnapshotRequest.pb(pubsub.UpdateSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) + req.return_value.content = return_value + + request = pubsub.UpdateSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + post_with_metadata.return_value = pubsub.Snapshot(), metadata + + client.update_snapshot( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_snapshot_rest_bad_request(request_type=pubsub.DeleteSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSnapshotRequest, + dict, + ], +) +def test_delete_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_delete_snapshot" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteSnapshotRequest.pb(pubsub.DeleteSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = pubsub.DeleteSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_seek_rest_bad_request(request_type=pubsub.SeekRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.seek(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.SeekRequest, + dict, + ], +) +def test_seek_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.SeekResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.SeekResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.seek(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.SeekResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_seek_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_seek" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_seek_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_seek" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = pubsub.SeekRequest.pb(pubsub.SeekRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = pubsub.SeekResponse.to_json(pubsub.SeekResponse()) + req.return_value.content = return_value + + request = pubsub.SeekRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.SeekResponse() + post_with_metadata.return_value = pubsub.SeekResponse(), metadata + + client.seek( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
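+        # Unlike the pubsub.* types above, the IAM messages come from
+        # google.iam.v1 as raw protobuf (policy_pb2), so MessageToJson is
+        # applied directly with no proto-plus .pb() conversion step.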
+ return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_initialize_client_w_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + client.create_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Subscription() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + client.get_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
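+    # Patching the transport method's __call__ means no HTTP request is built
+    # at all; the test only checks that request=None is coerced into a default
+    # (all-empty) request message before it reaches the transport layer.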
+ with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + client.update_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_subscriptions_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + client.list_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + client.delete_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_modify_ack_deadline_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + client.modify_ack_deadline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyAckDeadlineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_acknowledge_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + client.acknowledge(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.AcknowledgeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_pull_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + client.pull(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PullRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_modify_push_config_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + client.modify_push_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyPushConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + client.get_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_snapshots_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + client.list_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + client.create_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.CreateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + client.update_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + client.delete_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_seek_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + client.seek(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.SeekRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SubscriberGrpcTransport, + ) + + +def test_subscriber_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SubscriberTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_subscriber_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SubscriberTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
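+    # The tuple below covers every generated RPC together with the IAM mixin
+    # methods (set_iam_policy, get_iam_policy, test_iam_permissions).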
+ methods = ( + "create_subscription", + "get_subscription", + "update_subscription", + "list_subscriptions", + "delete_subscription", + "modify_ack_deadline", + "acknowledge", + "pull", + "streaming_pull", + "modify_push_config", + "get_snapshot", + "list_snapshots", + "create_snapshot", + "update_snapshot", + "delete_snapshot", + "seek", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_subscriber_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_subscriber_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport() + adc.assert_called_once() + + +def test_subscriber_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SubscriberClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + ], +) +def test_subscriber_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
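+    # Patching google.auth.default lets the test verify the scopes and quota
+    # project forwarded to ADC without resolving real credentials.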
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + transports.SubscriberRestTransport, + ], +) +def test_subscriber_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SubscriberGrpcTransport, grpc_helpers), + (transports.SubscriberGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_subscriber_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], +) +def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
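+    # The transport should invoke the callback and pass its certificate/key
+    # pair to grpc.ssl_channel_credentials(), as asserted below.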
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_subscriber_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SubscriberRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_subscriber_host_no_port(transport_name): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_subscriber_host_with_port(transport_name): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "pubsub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_subscriber_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SubscriberClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SubscriberClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_subscription._session + session2 = client2.transport.create_subscription._session + assert session1 != session2 + session1 = client1.transport.get_subscription._session + session2 = client2.transport.get_subscription._session + assert session1 != session2 + session1 = client1.transport.update_subscription._session + session2 = client2.transport.update_subscription._session + assert session1 != session2 + session1 = client1.transport.list_subscriptions._session + session2 = client2.transport.list_subscriptions._session + assert session1 != session2 + session1 = client1.transport.delete_subscription._session + session2 = client2.transport.delete_subscription._session + assert session1 != session2 + session1 = client1.transport.modify_ack_deadline._session + session2 = client2.transport.modify_ack_deadline._session + assert session1 != session2 + session1 = client1.transport.acknowledge._session + session2 = client2.transport.acknowledge._session + assert session1 != session2 + session1 = client1.transport.pull._session + session2 = client2.transport.pull._session + assert session1 != session2 + session1 = client1.transport.streaming_pull._session + session2 = 
client2.transport.streaming_pull._session + assert session1 != session2 + session1 = client1.transport.modify_push_config._session + session2 = client2.transport.modify_push_config._session + assert session1 != session2 + session1 = client1.transport.get_snapshot._session + session2 = client2.transport.get_snapshot._session + assert session1 != session2 + session1 = client1.transport.list_snapshots._session + session2 = client2.transport.list_snapshots._session + assert session1 != session2 + session1 = client1.transport.create_snapshot._session + session2 = client2.transport.create_snapshot._session + assert session1 != session2 + session1 = client1.transport.update_snapshot._session + session2 = client2.transport.update_snapshot._session + assert session1 != session2 + session1 = client1.transport.delete_snapshot._session + session2 = client2.transport.delete_snapshot._session + assert session1 != session2 + session1 = client1.transport.seek._session + session2 = client2.transport.seek._session + assert session1 != session2 + + +def test_subscriber_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +def test_subscriber_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials is None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor.
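+# The deprecated arguments still emit warnings, which the tests below either
+# suppress (FutureWarning) or assert on explicitly (DeprecationWarning).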
+@pytest.mark.filterwarnings("ignore::FutureWarning") +@pytest.mark.parametrize( + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], +) +def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], +) +def test_subscriber_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_listing_path(): + project = "squid" + location = "clam" + data_exchange = "whelk" + listing = "octopus" + expected = "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format( + project=project, + location=location, + data_exchange=data_exchange, + listing=listing, + ) + actual = SubscriberClient.listing_path(project, location, data_exchange, listing) + assert expected == actual + + +def test_parse_listing_path(): + expected = { + "project": "oyster", + 
"location": "nudibranch", + "data_exchange": "cuttlefish", + "listing": "mussel", + } + path = SubscriberClient.listing_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_listing_path(path) + assert expected == actual + + +def test_snapshot_path(): + project = "winkle" + snapshot = "nautilus" + expected = "projects/{project}/snapshots/{snapshot}".format( + project=project, + snapshot=snapshot, + ) + actual = SubscriberClient.snapshot_path(project, snapshot) + assert expected == actual + + +def test_parse_snapshot_path(): + expected = { + "project": "scallop", + "snapshot": "abalone", + } + path = SubscriberClient.snapshot_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_snapshot_path(path) + assert expected == actual + + +def test_subscription_path(): + project = "squid" + subscription = "clam" + expected = "projects/{project}/subscriptions/{subscription}".format( + project=project, + subscription=subscription, + ) + actual = SubscriberClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "whelk", + "subscription": "octopus", + } + path = SubscriberClient.subscription_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_subscription_path(path) + assert expected == actual + + +def test_topic_path(): + project = "oyster" + topic = "nudibranch" + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) + actual = SubscriberClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "cuttlefish", + "topic": "mussel", + } + path = SubscriberClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "winkle" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SubscriberClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = SubscriberClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = SubscriberClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = SubscriberClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = SubscriberClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = SubscriberClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + expected = "projects/{project}".format( + project=project, + ) + actual = SubscriberClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = SubscriberClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = SubscriberClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = SubscriberClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SubscriberTransport, "_prep_wrapped_messages" + ) as prep: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SubscriberTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SubscriberClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_set_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
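+        # grpc_helpers_async.FakeUnaryUnaryCall wraps the response in an
+        # awaitable, mimicking the call object a real asyncio stub returns.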
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
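+    # The resource name should be propagated to the backend via the
+    # x-goog-request-params routing header.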
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
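+    # The IAM mixin methods return raw protobuf messages (policy_pb2,
+    # iam_policy_pb2) rather than proto-plus wrappers.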
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
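+    # A plain dict is accepted for the request; the client coerces it into a
+    # TestIamPermissionsRequest before invoking the stub.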
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
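+        # Exiting the client as a context manager should close the underlying
+        # transport.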
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SubscriberClient, transports.SubscriberGrpcTransport), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/tests/unit/gapic/v1/test_publisher_client_v1.py b/tests/unit/gapic/v1/test_publisher_client_v1.py deleted file mode 100644 index ad4f38d8b..000000000 --- a/tests/unit/gapic/v1/test_publisher_client_v1.py +++ /dev/null @@ -1,560 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.pubsub_v1.gapic import publisher_client -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestPublisherClient(object): - def test_create_topic(self): - # Setup Expected Response - name_2 = "name2-1052831874" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name_2, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - name = client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.create_topic(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.Topic(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - name = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.create_topic(name) - - def test_update_topic(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = {} - update_mask = {} - - response = client.update_topic(topic, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateTopicRequest( - topic=topic, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) 
- patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_topic(topic, update_mask) - - def test_publish(self): - # Setup Expected Response - message_ids_element = "messageIdsElement-744837059" - message_ids = [message_ids_element] - expected_response = {"message_ids": message_ids} - expected_response = pubsub_pb2.PublishResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - data = b"-86" - messages_element = {"data": data} - messages = [messages_element] - - response = client.publish(topic, messages) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_publish_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - data = b"-86" - messages_element = {"data": data} - messages = [messages_element] - - with pytest.raises(CustomException): - client.publish(topic, messages) - - def test_get_topic(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.get_topic(topic) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetTopicRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.get_topic(topic) - - def test_list_topics(self): - # Setup Expected Response - next_page_token = "" - topics_element = {} - topics = [topics_element] - expected_response = {"next_page_token": next_page_token, "topics": topics} - expected_response = pubsub_pb2.ListTopicsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_topics(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.topics[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topics_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_topics(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_topic_subscriptions(self): - # Setup Expected Response - next_page_token = "" - subscriptions_element = "subscriptionsElement1698708147" - subscriptions = [subscriptions_element] - expected_response = { - "next_page_token": next_page_token, - "subscriptions": subscriptions, - } - expected_response = pubsub_pb2.ListTopicSubscriptionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_subscriptions(topic) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.subscriptions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicSubscriptionsRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topic_subscriptions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_subscriptions(topic) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_topic_snapshots(self): - # Setup Expected Response - next_page_token = "" - snapshots_element = "snapshotsElement1339034092" - snapshots = [snapshots_element] - expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} - expected_response = pubsub_pb2.ListTopicSnapshotsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_snapshots(topic) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert 
expected_response.snapshots[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicSnapshotsRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topic_snapshots_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_snapshots(topic) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_topic(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - client.delete_topic(topic) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteTopicRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.delete_topic(topic) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup 
Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) - - def test_detach_subscription(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.DetachSubscriptionResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.detach_subscription(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DetachSubscriptionRequest( - subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_detach_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.detach_subscription(subscription) diff --git a/tests/unit/gapic/v1/test_subscriber_client_v1.py 
b/tests/unit/gapic/v1/test_subscriber_client_v1.py deleted file mode 100644 index b059214d7..000000000 --- a/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ /dev/null @@ -1,892 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.pubsub_v1.gapic import subscriber_client -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - def stream_stream( - self, method, request_serializer=None, response_deserializer=None - ): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestSubscriberClient(object): - def test_create_subscription(self): - # Setup Expected Response - name_2 = "name2-1052831874" - topic_2 = "topic2-1139259102" - ack_deadline_seconds = 2135351438 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name_2, - "topic": topic_2, - "ack_deadline_seconds": ack_deadline_seconds, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.create_subscription(name, topic) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.Subscription(name=name, topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def 
test_create_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.create_subscription(name, topic) - - def test_get_subscription(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - ack_deadline_seconds = 2135351438 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name, - "topic": topic, - "ack_deadline_seconds": ack_deadline_seconds, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.get_subscription(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.get_subscription(subscription) - - def test_update_subscription(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - ack_deadline_seconds_2 = 921632575 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name, - "topic": topic, - "ack_deadline_seconds": ack_deadline_seconds_2, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - ack_deadline_seconds = 42 - subscription = {"ack_deadline_seconds": ack_deadline_seconds} - paths_element = "ack_deadline_seconds" - paths = [paths_element] - update_mask = {"paths": paths} - - response = client.update_subscription(subscription, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - ack_deadline_seconds = 42 - subscription = {"ack_deadline_seconds": ack_deadline_seconds} - paths_element = "ack_deadline_seconds" - paths = [paths_element] - update_mask = {"paths": paths} - - with pytest.raises(CustomException): - client.update_subscription(subscription, update_mask) - - def test_list_subscriptions(self): - # Setup Expected Response - next_page_token = "" - subscriptions_element = {} - subscriptions = [subscriptions_element] - expected_response = { - "next_page_token": next_page_token, - "subscriptions": subscriptions, - } - expected_response = pubsub_pb2.ListSubscriptionsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_subscriptions(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.subscriptions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListSubscriptionsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_subscriptions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_subscriptions(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_subscription(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - client.delete_subscription(subscription) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteSubscriptionRequest( - subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.delete_subscription(subscription) - - def test_get_snapshot(self): - # Setup Expected Response - name = "name3373707" - 
topic = "topic110546223" - expected_response = {"name": name, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - response = client.get_snapshot(snapshot) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - with pytest.raises(CustomException): - client.get_snapshot(snapshot) - - def test_modify_ack_deadline(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - ack_deadline_seconds = 2135351438 - - client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ModifyAckDeadlineRequest( - subscription=subscription, - ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds, - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_modify_ack_deadline_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - ack_deadline_seconds = 2135351438 - - with pytest.raises(CustomException): - client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - def test_acknowledge(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - - client.acknowledge(subscription, ack_ids) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_acknowledge_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = 
client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - - with pytest.raises(CustomException): - client.acknowledge(subscription, ack_ids) - - def test_pull(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.PullResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - max_messages = 496131527 - - response = client.pull(subscription, max_messages) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.PullRequest( - subscription=subscription, max_messages=max_messages - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_pull_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - max_messages = 496131527 - - with pytest.raises(CustomException): - client.pull(subscription, max_messages) - - def test_streaming_pull(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.StreamingPullResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - stream_ack_deadline_seconds = 1875467245 - request = { - "subscription": subscription, - "stream_ack_deadline_seconds": stream_ack_deadline_seconds, - } - request = pubsub_pb2.StreamingPullRequest(**request) - requests = [request] - - response = client.streaming_pull(requests) - resources = list(response) - assert len(resources) == 1 - assert expected_response == resources[0] - - assert len(channel.requests) == 1 - actual_requests = channel.requests[0][1] - assert len(actual_requests) == 1 - actual_request = list(actual_requests)[0] - assert request == actual_request - - def test_streaming_pull_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - stream_ack_deadline_seconds = 1875467245 - request = { - "subscription": subscription, - "stream_ack_deadline_seconds": stream_ack_deadline_seconds, - } - - request = pubsub_pb2.StreamingPullRequest(**request) - requests = [request] - - with pytest.raises(CustomException): - client.streaming_pull(requests) - - def test_modify_push_config(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel 
- client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - push_config = {} - - client.modify_push_config(subscription, push_config) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_modify_push_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - push_config = {} - - with pytest.raises(CustomException): - client.modify_push_config(subscription, push_config) - - def test_list_snapshots(self): - # Setup Expected Response - next_page_token = "" - snapshots_element = {} - snapshots = [snapshots_element] - expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} - expected_response = pubsub_pb2.ListSnapshotsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_snapshots(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.snapshots[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListSnapshotsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_snapshots_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_snapshots(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_create_snapshot(self): - # Setup Expected Response - name_2 = "name2-1052831874" - topic = "topic110546223" - expected_response = {"name": name_2, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.create_snapshot(name, subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_snapshot_exception(self): - # Mock the API response - channel = 
ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.create_snapshot(name, subscription) - - def test_update_snapshot(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - expected_response = {"name": name, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - seconds = 123456 - expire_time = {"seconds": seconds} - snapshot = {"expire_time": expire_time} - paths_element = "expire_time" - paths = [paths_element] - update_mask = {"paths": paths} - - response = client.update_snapshot(snapshot, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - seconds = 123456 - expire_time = {"seconds": seconds} - snapshot = {"expire_time": expire_time} - paths_element = "expire_time" - paths = [paths_element] - update_mask = {"paths": paths} - - with pytest.raises(CustomException): - client.update_snapshot(snapshot, update_mask) - - def test_delete_snapshot(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - client.delete_snapshot(snapshot) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - with pytest.raises(CustomException): - client.delete_snapshot(snapshot) - - def test_seek(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.SeekResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.seek(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.SeekRequest(subscription=subscription) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_seek_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.seek(subscription) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_test_iam_permissions(self): - # Setup Expected Response - 
expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) diff --git a/tests/unit/pubsub_v1/conftest.py b/tests/unit/pubsub_v1/conftest.py new file mode 100644 index 000000000..ab73ab26c --- /dev/null +++ b/tests/unit/pubsub_v1/conftest.py @@ -0,0 +1,62 @@ +# Copyright 2021 Google LLC + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# https://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import pytest + +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry import trace +import google.auth.credentials + + +@pytest.fixture +def creds(): + """ + Provide test creds to unit tests so that they can run without + GOOGLE_APPLICATION_CREDENTIALS set. + """ + yield google.auth.credentials.AnonymousCredentials() + + +@pytest.fixture(scope="session", autouse=True) +def set_trace_provider(): + provider = TracerProvider() + trace.set_tracer_provider(provider) + + +@pytest.fixture(scope="function") +def span_exporter(): + exporter = InMemorySpanExporter() + processor = SimpleSpanProcessor(exporter) + provider = trace.get_tracer_provider() + provider.add_span_processor(processor) + yield exporter + + +@pytest.fixture() +def modify_google_logger_propagation(): + """ + Allow propagation of logs to the root logger for tests + that depend on the caplog fixture. Restore the default + propagation setting after the test finishes. 
+ """ + logger = logging.getLogger("google") + original_propagate = logger.propagate + logger.propagate = True + try: + yield + finally: + logger.propagate = original_propagate diff --git a/tests/unit/pubsub_v1/publisher/batch/test_base.py b/tests/unit/pubsub_v1/publisher/batch/test_base.py index f10b54ee5..ae5dbea04 100644 --- a/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -14,35 +14,35 @@ from __future__ import absolute_import -import mock from google.auth import credentials from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._batch.base import BatchStatus from google.cloud.pubsub_v1.publisher._batch.thread import Batch +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) -def create_batch(status=None, settings=types.BatchSettings()): +def create_batch(status, settings=types.BatchSettings()): """Create a batch object, which does not commit. Args: - status (str): If provided, the batch's internal status will be set - to the provided status. + status (str): The batch's internal status will be set to the provided status. Returns: ~.pubsub_v1.publisher.batch.thread.Batch: The batch object """ - creds = mock.Mock(spec=credentials.Credentials) - client = publisher.Client(credentials=creds) + client = publisher.Client(credentials=credentials.AnonymousCredentials()) batch = Batch(client, "topic_name", settings) - if status: - batch._status = status + batch._status = status return batch def test_len(): batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) assert len(batch) == 0 - batch.publish(types.PubsubMessage(data=b"foo")) + batch.publish(PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo"))) assert len(batch) == 1 diff --git a/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/tests/unit/pubsub_v1/publisher/batch/test_thread.py index e9d2b09c0..ad8fa376b 100644 --- a/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -13,14 +13,25 @@ # limitations under the License. 
import datetime +import sys import threading import time -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest +from opentelemetry import trace +from opentelemetry.trace import SpanContext + import google.api_core.exceptions +from google.api_core import gapic_v1 from google.auth import credentials +from google.auth import exceptions as auth_exceptions from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions @@ -28,18 +39,29 @@ from google.cloud.pubsub_v1.publisher._batch.base import BatchCancellationReason from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._batch.thread import Batch - - -def create_client(): - creds = mock.Mock(spec=credentials.Credentials) - return publisher.Client(credentials=creds) +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + + +def create_client(enable_open_telemetry: bool = False): + return publisher.Client( + credentials=credentials.AnonymousCredentials(), + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry, + ), + ) def create_batch( topic="topic_name", batch_done_callback=None, commit_when_full=True, - **batch_settings + commit_retry=gapic_v1.method.DEFAULT, + commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + enable_open_telemetry: bool = False, + **batch_settings, ): """Return a batch object suitable for testing. @@ -49,13 +71,18 @@ def create_batch( the batch is done, either with a success or a failure flag. commit_when_full (bool): Whether to commit the batch when the batch has reached byte-size or number-of-messages limits. + commit_retry (Optional[google.api_core.retry.Retry]): The retry settings + for the batch commit call. + commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout to apply to the batch commit call. + enable_open_telemetry (bool): Whether to enable OpenTelemetry. batch_settings (Mapping[str, str]): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. Returns: ~.pubsub_v1.publisher.batch.thread.Batch: A batch object. """ - client = create_client() + client = create_client(enable_open_telemetry=enable_open_telemetry) settings = types.BatchSettings(**batch_settings) return Batch( client, @@ -63,6 +90,8 @@ def create_batch( settings, batch_done_callback=batch_done_callback, commit_when_full=commit_when_full, + commit_retry=commit_retry, + commit_timeout=commit_timeout, ) @@ -111,14 +140,22 @@ def test_commit_no_op(): def test_blocking__commit(): batch = create_batch() futures = ( - batch.publish({"data": b"This is my message."}), - batch.publish({"data": b"This is another message."}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message.") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is another message.") + ) + ), ) # Set up the underlying API publish method to return a PublishResponse. 
- publish_response = types.PublishResponse(message_ids=["a", "b"]) + publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) patch = mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ) with patch as publish: batch._commit() @@ -126,11 +163,13 @@ def test_blocking__commit(): # Establish that the underlying API call was made with expected # arguments. publish.assert_called_once_with( - "topic_name", - [ - types.PubsubMessage(data=b"This is my message."), - types.PubsubMessage(data=b"This is another message."), + topic="topic_name", + messages=[ + gapic_types.PubsubMessage(data=b"This is my message."), + gapic_types.PubsubMessage(data=b"This is another message."), ], + retry=gapic_v1.method.DEFAULT, + timeout=gapic_v1.method.DEFAULT, ) # Establish that all of the futures are done, and that they have the @@ -141,28 +180,88 @@ def test_blocking__commit(): assert futures[1].result() == "b" +def test_blocking__commit_custom_retry(): + batch = create_batch(commit_retry=mock.sentinel.custom_retry) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message.") + ) + ) + + # Set up the underlying API publish method to return a PublishResponse. + publish_response = gapic_types.PublishResponse(message_ids=["a"]) + patch = mock.patch.object( + type(batch.client), "_gapic_publish", return_value=publish_response + ) + with patch as publish: + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. + publish.assert_called_once_with( + topic="topic_name", + messages=[gapic_types.PubsubMessage(data=b"This is my message.")], + retry=mock.sentinel.custom_retry, + timeout=gapic_v1.method.DEFAULT, + ) + + +def test_blocking__commit_custom_timeout(): + batch = create_batch(commit_timeout=mock.sentinel.custom_timeout) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message.") + ) + ) + + # Set up the underlying API publish method to return a PublishResponse. + publish_response = gapic_types.PublishResponse(message_ids=["a"]) + patch = mock.patch.object( + type(batch.client), "_gapic_publish", return_value=publish_response + ) + with patch as publish: + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. 
+ publish.assert_called_once_with( + topic="topic_name", + messages=[gapic_types.PubsubMessage(data=b"This is my message.")], + retry=gapic_v1.method.DEFAULT, + timeout=mock.sentinel.custom_timeout, + ) + + def test_client_api_publish_not_blocking_additional_publish_calls(): batch = create_batch(max_messages=1) api_publish_called = threading.Event() - def api_publish_delay(_, messages): + def api_publish_delay(topic="", messages=(), retry=None, timeout=None): api_publish_called.set() time.sleep(1.0) message_ids = [str(i) for i in range(len(messages))] - return types.PublishResponse(message_ids=message_ids) + return gapic_types.PublishResponse(message_ids=message_ids) api_publish_patch = mock.patch.object( - type(batch.client.api), "publish", side_effect=api_publish_delay + type(batch.client), "_gapic_publish", side_effect=api_publish_delay ) with api_publish_patch: - batch.publish({"data": b"first message"}) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"first message") + ) + ) start = datetime.datetime.now() event_set = api_publish_called.wait(timeout=1.0) - if not event_set: + if not event_set: # pragma: NO COVER pytest.fail("API publish was not called in time") - batch.publish({"data": b"second message"}) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"second message") + ) + ) end = datetime.datetime.now() # While a batch commit in progress, waiting for the API publish call to @@ -196,7 +295,7 @@ def test_blocking__commit_already_started(_LOGGER): def test_blocking__commit_no_messages(): batch = create_batch() - with mock.patch.object(type(batch.client.api), "publish") as publish: + with mock.patch.object(type(batch.client), "_gapic_publish") as publish: batch._commit() assert publish.call_count == 0 @@ -205,14 +304,22 @@ def test_blocking__commit_no_messages(): def test_blocking__commit_wrong_messageid_length(): batch = create_batch() futures = ( - batch.publish({"data": b"blah blah blah"}), - batch.publish({"data": b"blah blah blah blah"}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah blah") + ) + ), ) # Set up a PublishResponse that only returns one message ID. - publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) patch = mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ) with patch: @@ -223,35 +330,57 @@ def test_blocking__commit_wrong_messageid_length(): assert isinstance(future.exception(), exceptions.PublishError) -def test_block__commmit_api_error(): +@pytest.mark.parametrize( + "error", + [ + (google.api_core.exceptions.InternalServerError("Internal server error"),), + (auth_exceptions.TransportError("some transport error"),), + ], +) +def test_block__commmit_api_error(error): batch = create_batch() futures = ( - batch.publish({"data": b"blah blah blah"}), - batch.publish({"data": b"blah blah blah blah"}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah blah") + ) + ), ) # Make the API throw an error when publishing. 
- error = google.api_core.exceptions.InternalServerError("uh oh") - patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error) + patch = mock.patch.object(type(batch.client), "_gapic_publish", side_effect=error) with patch: batch._commit() for future in futures: assert future.done() - assert future.exception() == error + assert future.exception() == error[0] def test_block__commmit_retry_error(): batch = create_batch() futures = ( - batch.publish({"data": b"blah blah blah"}), - batch.publish({"data": b"blah blah blah blah"}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah blah") + ) + ), ) # Make the API throw an error when publishing. error = google.api_core.exceptions.RetryError("uh oh", None) - patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error) + patch = mock.patch.object(type(batch.client), "_gapic_publish", side_effect=error) with patch: batch._commit() @@ -263,24 +392,31 @@ def test_block__commmit_retry_error(): def test_publish_updating_batch_size(): batch = create_batch(topic="topic_foo") - messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spameggs"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"1335020400"), + ), ) # Publish each of the messages, which should save them to the batch. - futures = [batch.publish(message) for message in messages] + futures = [batch.publish(wrapper) for wrapper in wrappers] # There should be three messages on the batch, and three futures. - assert len(batch.messages) == 3 + assert len(batch.message_wrappers) == 3 assert batch._futures == futures # The size should have been incremented by the sum of the size # contributions of each message to the PublishRequest. 
- base_request_size = types.PublishRequest(topic="topic_foo").ByteSize() + base_request_size = gapic_types.PublishRequest(topic="topic_foo")._pb.ByteSize() expected_request_size = base_request_size + sum( - types.PublishRequest(messages=[msg]).ByteSize() for msg in messages + gapic_types.PublishRequest(messages=[wrapper.message])._pb.ByteSize() + for wrapper in wrappers ) assert batch.size == expected_request_size @@ -289,68 +425,82 @@ def test_publish_updating_batch_size(): def test_publish(): batch = create_batch() - message = types.PubsubMessage() - future = batch.publish(message) + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage()) + future = batch.publish(wrapper) - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert batch._futures == [future] def test_publish_max_messages_zero(): batch = create_batch(topic="topic_foo", max_messages=0) - - message = types.PubsubMessage(data=b"foobarbaz") + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ) with mock.patch.object(batch, "commit") as commit: - future = batch.publish(message) + future = batch.publish(wrapper) assert future is not None - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert batch._futures == [future] commit.assert_called_once() def test_publish_max_messages_enforced(): batch = create_batch(topic="topic_foo", max_messages=1) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + wrapper2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz2") + ) - message = types.PubsubMessage(data=b"foobarbaz") - message2 = types.PubsubMessage(data=b"foobarbaz2") - - future = batch.publish(message) - future2 = batch.publish(message2) + future = batch.publish(wrapper) + future2 = batch.publish(wrapper2) assert future is not None assert future2 is None - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert len(batch._futures) == 1 def test_publish_max_bytes_enforced(): batch = create_batch(topic="topic_foo", max_bytes=15) - message = types.PubsubMessage(data=b"foobarbaz") - message2 = types.PubsubMessage(data=b"foobarbaz2") + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + wrapper2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz2") + ) - future = batch.publish(message) - future2 = batch.publish(message2) + future = batch.publish(wrapper) + future2 = batch.publish(wrapper2) assert future is not None assert future2 is None - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert len(batch._futures) == 1 def test_publish_exceed_max_messages(): max_messages = 4 batch = create_batch(max_messages=max_messages) - messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spameggs"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"1335020400"), + ), ) # Publish each of the messages, which should save them to the batch. 
with mock.patch.object(batch, "commit") as commit: - futures = [batch.publish(message) for message in messages] + futures = [batch.publish(wrapper) for wrapper in wrappers] assert batch._futures == futures assert len(futures) == max_messages - 1 @@ -359,7 +509,11 @@ def test_publish_exceed_max_messages(): # When a fourth message is published, commit should be called. # No future will be returned in this case. - future = batch.publish(types.PubsubMessage(data=b"last one")) + future = batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"last one") + ) + ) commit.assert_called_once_with() assert future is None @@ -374,34 +528,40 @@ def test_publish_single_message_size_exceeds_server_size_limit(): max_bytes=1000 * 1000, # way larger than (mocked) server side limit ) - big_message = types.PubsubMessage(data=b"x" * 984) + big_message = gapic_types.PubsubMessage(data=b"x" * 984) - request_size = types.PublishRequest( + request_size = gapic_types.PublishRequest( topic="topic_foo", messages=[big_message] - ).ByteSize() + )._pb.ByteSize() assert request_size == 1001 # sanity check, just above the (mocked) server limit with pytest.raises(exceptions.MessageTooLargeError): - batch.publish(big_message) + batch.publish(wrapper=PublishMessageWrapper(message=big_message)) @mock.patch.object(thread, "_SERVER_PUBLISH_MAX_BYTES", 1000) def test_publish_total_messages_size_exceeds_server_size_limit(): batch = create_batch(topic="topic_foo", max_messages=10, max_bytes=1500) - messages = ( - types.PubsubMessage(data=b"x" * 500), - types.PubsubMessage(data=b"x" * 600), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"x" * 500), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"x" * 600), + ), ) # Sanity check - request size is still below BatchSettings.max_bytes, # but it exceeds the server-side size limit. - request_size = types.PublishRequest(topic="topic_foo", messages=messages).ByteSize() + request_size = gapic_types.PublishRequest( + topic="topic_foo", messages=[wrapper.message for wrapper in wrappers] + )._pb.ByteSize() assert 1000 < request_size < 1500 with mock.patch.object(batch, "commit") as fake_commit: - batch.publish(messages[0]) - batch.publish(messages[1]) + batch.publish(wrappers[0]) + batch.publish(wrappers[1]) # The server side limit should kick in and cause a commit. fake_commit.assert_called_once() @@ -409,21 +569,40 @@ def test_publish_total_messages_size_exceeds_server_size_limit(): def test_publish_dict(): batch = create_batch() - future = batch.publish({"data": b"foobarbaz", "attributes": {"spam": "eggs"}}) + future = batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foobarbaz", + attributes={"spam": "eggs"}, + ), + ) + ) # There should be one message on the batch. 
- expected_message = types.PubsubMessage( - data=b"foobarbaz", attributes={"spam": "eggs"} + expected_message_wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foobarbaz", + attributes={"spam": "eggs"}, + ) ) - assert batch.messages == [expected_message] + + assert batch.message_wrappers == [expected_message_wrapper] assert batch._futures == [future] def test_cancel(): batch = create_batch() futures = ( - batch.publish({"data": b"This is my message."}), - batch.publish({"data": b"This is another message."}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message."), + ), + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is another message."), + ), + ), ) batch.cancel(BatchCancellationReason.PRIOR_ORDERED_MESSAGE_FAILED) @@ -439,19 +618,29 @@ def test_do_not_commit_when_full_when_flag_is_off(): max_messages = 4 # Set commit_when_full flag to False batch = create_batch(max_messages=max_messages, commit_when_full=False) - messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spameggs"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"1335020400"), + ), ) with mock.patch.object(batch, "commit") as commit: # Publish 3 messages. - futures = [batch.publish(message) for message in messages] + futures = [batch.publish(wrapper) for wrapper in wrappers] assert len(futures) == 3 # When a fourth message is published, commit should not be called. - future = batch.publish(types.PubsubMessage(data=b"last one")) + future = batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"last one"), + ) + ) assert commit.call_count == 0 assert future is None @@ -471,14 +660,16 @@ def test_batch_done_callback_called_on_success(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") - batch.publish(message) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + batch.publish(wrapper) # One response for one published message. - publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) with mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ): batch._commit() @@ -491,18 +682,20 @@ def test_batch_done_callback_called_on_publish_failure(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") - batch.publish(message) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + batch.publish(wrapper) # One response for one published message. - publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) # Induce publish error. 
error = google.api_core.exceptions.InternalServerError("uh oh") with mock.patch.object( - type(batch.client.api), - "publish", + type(batch.client), + "_gapic_publish", return_value=publish_response, side_effect=error, ): @@ -517,16 +710,264 @@ def test_batch_done_callback_called_on_publish_response_invalid(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") - batch.publish(message) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ) + batch.publish(wrapper) # No message ids returned in successful publish response -> invalid. - publish_response = types.PublishResponse(message_ids=[]) + publish_response = gapic_types.PublishResponse(message_ids=[]) with mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ): batch._commit() assert batch_done_callback_tracker.called assert not batch_done_callback_tracker.success + + +# Refer https://opentelemetry.io/docs/languages/python/#version-support +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_open_telemetry_commit_publish_rpc_span_none(span_exporter): + """ + Test scenario where OpenTelemetry is enabled, publish RPC + span creation fails (unexpectedly), and hence batch._rpc_span is None when + attempting to close it. Required for code coverage. + """ + TOPIC = "projects/projectID/topics/topicID" + batch = create_batch(topic=TOPIC, enable_open_telemetry=True) + + message = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + message.start_create_span(topic=TOPIC, ordering_key=None) + batch.publish(message) + + # Mock error when publish RPC span creation is attempted. + error = google.api_core.exceptions.InternalServerError("error") + + with mock.patch.object( + type(batch), + "_start_publish_rpc_span", + side_effect=error, + ): + batch._commit() + + assert batch._rpc_span is None + spans = span_exporter.get_finished_spans() + + # Only the create span should be exported, since publish RPC span creation + # should fail with a mock error. + assert len(spans) == 1 + + publish_create_span = spans[0] + assert publish_create_span.status.status_code == trace.status.StatusCode.ERROR + assert publish_create_span.end_time is not None + + assert publish_create_span.name == "topicID create" + # Publish start event and exception event should be present in publish + # create span. + assert len(publish_create_span.events) == 2 + assert publish_create_span.events[0].name == "publish start" + assert publish_create_span.events[1].name == "exception" + + +# Refer https://opentelemetry.io/docs/languages/python/#version-support +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_open_telemetry_commit_publish_rpc_exception(span_exporter): + TOPIC = "projects/projectID/topics/topicID" + batch = create_batch(topic=TOPIC, enable_open_telemetry=True) + + message = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + message.start_create_span(topic=TOPIC, ordering_key=None) + batch.publish(message) + + # Mock publish error.
+ error = google.api_core.exceptions.InternalServerError("error") + + with mock.patch.object( + type(batch.client), + "_gapic_publish", + side_effect=error, + ): + batch._commit() + + spans = span_exporter.get_finished_spans() + # Span 1: Publish RPC span + # Span 2: Create span. + assert len(spans) == 2 + + # Verify both spans recorded error and have ended. + for span in spans: + assert span.status.status_code == trace.status.StatusCode.ERROR + assert span.end_time is not None + + publish_rpc_span = spans[0] + assert publish_rpc_span.name == "topicID publish" + assert len(publish_rpc_span.events) == 1 + assert publish_rpc_span.events[0].name == "exception" + + publish_create_span = spans[1] + assert publish_create_span.name == "topicID create" + # Publish start event and exception event should be present in publish + # create span. + assert len(publish_create_span.events) == 2 + assert publish_create_span.events[0].name == "publish start" + assert publish_create_span.events[1].name == "exception" + + +# Refer https://opentelemetry.io/docs/languages/python/#version-support +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_opentelemetry_commit_sampling(span_exporter): + TOPIC = "projects/projectID/topics/topic" + batch = create_batch( + topic=TOPIC, + enable_open_telemetry=True, + ) + + message1 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + message1.start_create_span(topic=TOPIC, ordering_key=None) + + message2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"bar"), + ) + message2.start_create_span(topic=TOPIC, ordering_key=None) + + # Mock the 'get_span_context' method to return a mock SpanContext + mock_span_context = mock.Mock(spec=SpanContext) + mock_span_context.trace_flags.sampled = False + + batch.publish(message1) + batch.publish(message2) + + publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) + + # Patch the 'create_span' method to return the mock SpanContext + with mock.patch.object( + message1.create_span, "get_span_context", return_value=mock_span_context + ): + with mock.patch.object( + type(batch.client), "_gapic_publish", return_value=publish_response + ): + batch._commit() + + spans = span_exporter.get_finished_spans() + + # Span 1: Publish RPC span of both messages + # Span 2: Create span of message 1 + # Span 3: Create span of message 2 + assert len(spans) == 3 + + publish_rpc_span, create_span1, create_span2 = spans + + # Verify publish RPC span has only one link corresponding to + # message 2 which is included in the sample. + assert len(publish_rpc_span.links) == 1 + assert len(create_span1.links) == 0 + assert len(create_span2.links) == 1 + assert publish_rpc_span.links[0].context == create_span2.context + assert create_span2.links[0].context == publish_rpc_span.context + + # Verify all spans have ended. + for span in spans: + assert span.end_time is not None + + # Verify both publish create spans have 2 events - publish start and publish + # end. 
+ for span in spans[1:]: + assert len(span.events) == 2 + assert span.events[0].name == "publish start" + assert span.events[1].name == "publish end" + + +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_opentelemetry_commit(span_exporter): + TOPIC = "projects/projectID/topics/topic" + batch = create_batch( + topic=TOPIC, + enable_open_telemetry=True, + ) + + msg1 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + msg2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"bar"), + ) + msg1.start_create_span(topic=TOPIC, ordering_key=None) + msg2.start_create_span(topic=TOPIC, ordering_key=None) + + # Add both messages to the batch. + batch.publish(msg1) + batch.publish(msg2) + + publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) + with mock.patch.object( + type(batch.client), "_gapic_publish", return_value=publish_response + ): + batch._commit() + + spans = span_exporter.get_finished_spans() + + # Span 1: publish RPC span - closed after publish RPC success. + # Span 2: publisher create span of message 1 - closed after publish RPC success. + # Span 3: publisher create span of message 2 - closed after publish RPC success. + assert len(spans) == 3 + publish_rpc_span, create_span1, create_span2 = spans + + # Verify publish RPC span + assert publish_rpc_span.name == "topic publish" + assert publish_rpc_span.kind == trace.SpanKind.CLIENT + assert publish_rpc_span.end_time is not None + attributes = publish_rpc_span.attributes + assert attributes["messaging.system"] == "gcp_pubsub" + assert attributes["messaging.destination.name"] == "topic" + assert attributes["gcp.project_id"] == "projectID" + assert attributes["messaging.batch.message_count"] == 2 + assert attributes["messaging.operation"] == "publish" + assert attributes["code.function"] == "_commit" + assert publish_rpc_span.parent is None + # Verify the links correspond to the spans of the published messages. + assert len(publish_rpc_span.links) == 2 + assert publish_rpc_span.links[0].context == create_span1.context + assert publish_rpc_span.links[1].context == create_span2.context + assert len(create_span1.links) == 1 + assert create_span1.links[0].context == publish_rpc_span.get_span_context() + assert len(create_span2.links) == 1 + assert create_span2.links[0].context == publish_rpc_span.get_span_context() + + # Verify spans of the published messages. + assert create_span1.name == "topic create" + assert create_span2.name == "topic create" + + # Verify the publish create spans have been closed after publish success. + assert create_span1.end_time is not None + assert create_span2.end_time is not None + + # Verify message IDs returned from gapic publish are added as attributes + # to the publisher create spans of the messages. 
+ assert "messaging.message.id" in create_span1.attributes + assert create_span1.attributes["messaging.message.id"] == "a" + assert "messaging.message.id" in create_span2.attributes + assert create_span2.attributes["messaging.message.id"] == "b" + + # Verify publish end event added to the span + assert len(create_span1.events) == 2 + assert len(create_span2.events) == 2 + assert create_span1.events[0].name == "publish start" + assert create_span1.events[1].name == "publish end" + assert create_span2.events[0].name == "publish start" + assert create_span2.events[1].name == "publish end" diff --git a/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index 08e1954e6..4377d1447 100644 --- a/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -13,24 +13,35 @@ # limitations under the License. import concurrent.futures as futures -import mock +import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest from google.auth import credentials from google.cloud.pubsub_v1 import publisher -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) _ORDERING_KEY = "ordering_key_1" def create_message(): - return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ) def create_client(): - creds = mock.Mock(spec=credentials.Credentials) - return publisher.Client(credentials=creds) + return publisher.Client(credentials=credentials.AnonymousCredentials()) def create_ordered_sequencer(client): @@ -172,6 +183,30 @@ def test_basic_publish(): batch.publish.assert_called_once_with(message) +def test_publish_custom_retry(): + client = create_client() + message = create_message() + sequencer = create_ordered_sequencer(client) + + sequencer.publish(message, retry=mock.sentinel.custom_retry) + + assert sequencer._ordered_batches # batch exists + batch = sequencer._ordered_batches[0] + assert batch._commit_retry is mock.sentinel.custom_retry + + +def test_publish_custom_timeout(): + client = create_client() + message = create_message() + sequencer = create_ordered_sequencer(client) + + sequencer.publish(message, timeout=mock.sentinel.custom_timeout) + + assert sequencer._ordered_batches # batch exists + batch = sequencer._ordered_batches[0] + assert batch._commit_timeout is mock.sentinel.custom_timeout + + def test_publish_batch_full(): client = create_client() message = create_message() diff --git a/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 22e24ed06..739bae3bd 100644 --- a/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -11,8 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock -import mock import pytest from google.auth import credentials @@ -20,15 +26,20 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._batch import base from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) def create_message(): - return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ) def create_client(): - creds = mock.Mock(spec=credentials.Credentials) - return publisher.Client(credentials=creds) + return publisher.Client(credentials=credentials.AnonymousCredentials()) def test_stop(): @@ -89,6 +100,28 @@ def test_basic_publish(): batch.publish.assert_called_once_with(message) +def test_publish_custom_retry(): + client = create_client() + message = create_message() + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + + sequencer.publish(message, retry=mock.sentinel.custom_retry) + + assert sequencer._current_batch is not None + assert sequencer._current_batch._commit_retry is mock.sentinel.custom_retry + + +def test_publish_custom_timeout(): + client = create_client() + message = create_message() + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + + sequencer.publish(message, timeout=mock.sentinel.custom_timeout) + + assert sequencer._current_batch is not None + assert sequencer._current_batch._commit_timeout is mock.sentinel.custom_timeout + + def test_publish_batch_full(): client = create_client() message = create_message() @@ -108,13 +141,21 @@ def test_publish_batch_full(): def test_publish_after_batch_error(): client = create_client() message = create_message() - batch = mock.Mock(spec=client._batch_class) + + batch = client._batch_class( + client, "topic_name", types.BatchSettings(max_latency=float("inf")) + ) + batch._message_wrappers.append( + mock.Mock(name="message") + ) # Make batch truthy (non-empty). sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") sequencer._set_batch(batch) - sequencer.commit() - batch.commit.assert_called_once() + with mock.patch.object(batch, "commit") as fake_batch_commit: + sequencer.commit() + + fake_batch_commit.assert_called_once() # Simulate publish RPC failing. 
batch._set_status(base.BatchStatus.ERROR) diff --git a/tests/unit/pubsub_v1/publisher/test_flow_controller.py b/tests/unit/pubsub_v1/publisher/test_flow_controller.py index 26a61663b..776c6db41 100644 --- a/tests/unit/pubsub_v1/publisher/test_flow_controller.py +++ b/tests/unit/pubsub_v1/publisher/test_flow_controller.py @@ -16,37 +16,36 @@ import threading import time -import warnings +from typing import Callable +from typing import Sequence +from typing import Union import pytest +import google from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.flow_controller import FlowController +from google.pubsub_v1 import types as grpc_types def _run_in_daemon( - flow_controller, - action, - messages, - all_done_event, - error_event=None, - action_pause=None, + action: Callable[["google.cloud.pubsub_v1.types.PubsubMessage"], None], + messages: Sequence["google.cloud.pubsub_v1.types.PubsubMessage"], + all_done_event: threading.Event, + error_event: threading.Event = None, + action_pause: Union[int, float] = None, ): - """Run flow controller action (add or remove messages) in a daemon thread. - """ - assert action in ("add", "release") + """Run flow controller action (add or remove messages) in a daemon thread.""" def run_me(): - method = getattr(flow_controller, action) - try: for msg in messages: if action_pause is not None: time.sleep(action_pause) - method(msg) + action(msg) except Exception: - if error_event is not None: + if error_event is not None: # pragma: NO COVER error_event.set() else: all_done_event.set() @@ -66,7 +65,7 @@ def test_no_overflow_no_error(): # there should be no errors for data in (b"foo", b"bar", b"baz"): - msg = types.PubsubMessage(data=data) + msg = grpc_types.PubsubMessage(data=data) flow_controller.add(msg) @@ -79,8 +78,8 @@ def test_overflow_no_error_on_ignore(): flow_controller = FlowController(settings) # there should be no overflow errors - flow_controller.add(types.PubsubMessage(data=b"foo")) - flow_controller.add(types.PubsubMessage(data=b"bar")) + flow_controller.add(grpc_types.PubsubMessage(data=b"foo")) + flow_controller.add(grpc_types.PubsubMessage(data=b"bar")) def test_message_count_overflow_error(): @@ -91,9 +90,9 @@ def test_message_count_overflow_error(): ) flow_controller = FlowController(settings) - flow_controller.add(types.PubsubMessage(data=b"foo")) + flow_controller.add(grpc_types.PubsubMessage(data=b"foo")) with pytest.raises(exceptions.FlowControlLimitError) as error: - flow_controller.add(types.PubsubMessage(data=b"bar")) + flow_controller.add(grpc_types.PubsubMessage(data=b"bar")) assert "messages: 2 / 1" in str(error.value) @@ -109,14 +108,14 @@ def test_byte_size_overflow_error(): # Since the message data itself occupies 100 bytes, it means that both # messages combined will exceed the imposed byte limit of 199, but a single # message will not (the message size overhead is way lower than data size). 
- msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) flow_controller.add(msg1) with pytest.raises(exceptions.FlowControlLimitError) as error: flow_controller.add(msg2) - total_size = msg1.ByteSize() + msg2.ByteSize() + total_size = msg1._pb.ByteSize() + msg2._pb.ByteSize() expected_info = "bytes: {} / 199".format(total_size) assert expected_info in str(error.value) @@ -129,9 +128,9 @@ def test_no_error_on_moderate_message_flow(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) # The flow control settings will accept two in-flight messages, but not three. # If releasing messages works correctly, the sequence below will not raise errors. @@ -151,14 +150,14 @@ def test_rejected_messages_do_not_increase_total_load(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) flow_controller.add(msg1) for _ in range(5): with pytest.raises(exceptions.FlowControlLimitError): - flow_controller.add(types.PubsubMessage(data=b"z" * 100)) + flow_controller.add(grpc_types.PubsubMessage(data=b"z" * 100)) # After releasing a message we should again be able to add another one, despite # previously trying to add a lot of other messages. @@ -174,12 +173,15 @@ def test_incorrectly_releasing_too_many_messages(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) # Releasing a message that would make the load negative should result in a warning. 
- with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + RuntimeWarning, + match="Releasing a message that was never added or already released", + ) as warned: flow_controller.release(msg1) assert len(warned) == 1 @@ -196,7 +198,7 @@ def test_incorrectly_releasing_too_many_messages(): error_msg = str(error.value) assert "messages: 2 / 1" in error_msg - total_size = msg2.ByteSize() + msg3.ByteSize() + total_size = msg2._pb.ByteSize() + msg3._pb.ByteSize() expected_size_info = "bytes: {} / 150".format(total_size) assert expected_size_info in error_msg @@ -209,10 +211,10 @@ def test_blocking_on_overflow_until_free_capacity(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) - msg4 = types.PubsubMessage(data=b"w" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) + msg4 = grpc_types.PubsubMessage(data=b"w" * 100) # If there is a concurrency bug in FlowController, we do not want to block # the main thread running the tests, thus we delegate all add/release @@ -226,29 +228,31 @@ def test_blocking_on_overflow_until_free_capacity(): releasing_x_done = threading.Event() # Adding a message with free capacity should not block. - _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) + _run_in_daemon(flow_controller.add, [msg1], adding_1_done) if not adding_1_done.wait(timeout=0.1): - pytest.fail("Adding a message with enough flow capacity blocked or errored.") + pytest.fail( # pragma: NO COVER + "Adding a message with enough flow capacity blocked or errored." + ) # Adding messages when there is not enough capacity should block, even if # added through multiple threads. - _run_in_daemon(flow_controller, "add", [msg2], adding_2_done) + _run_in_daemon(flow_controller.add, [msg2], adding_2_done) if adding_2_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER - _run_in_daemon(flow_controller, "add", [msg3], adding_3_done) + _run_in_daemon(flow_controller.add, [msg3], adding_3_done) if adding_3_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER - _run_in_daemon(flow_controller, "add", [msg4], adding_4_done) + _run_in_daemon(flow_controller.add, [msg4], adding_4_done) if adding_4_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER # After releasing one message, there should be room for a new message, which # should result in unblocking one of the waiting threads. - _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) + _run_in_daemon(flow_controller.release, [msg1], releasing_1_done) if not releasing_1_done.wait(timeout=0.1): - pytest.fail("Releasing a message blocked or errored.") + pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER done_status = [ adding_2_done.wait(timeout=0.1), @@ -263,10 +267,10 @@ def test_blocking_on_overflow_until_free_capacity(): # Release another message and verify that yet another thread gets unblocked. 
added_msg = [msg2, msg3, msg4][done_status.index(True)] - _run_in_daemon(flow_controller, "release", [added_msg], releasing_x_done) + _run_in_daemon(flow_controller.release, [added_msg], releasing_x_done) if not releasing_x_done.wait(timeout=0.1): - pytest.fail("Releasing messages blocked or errored.") + pytest.fail("Releasing messages blocked or errored.") # pragma: NO COVER released_count = sum( ( @@ -286,11 +290,11 @@ def test_error_if_mesage_would_block_indefinitely(): ) flow_controller = FlowController(settings) - msg = types.PubsubMessage(data=b"xyz") + msg = grpc_types.PubsubMessage(data=b"xyz") adding_done = threading.Event() error_event = threading.Event() - _run_in_daemon(flow_controller, "add", [msg], adding_done, error_event=error_event) + _run_in_daemon(flow_controller.add, [msg], adding_done, error_event=error_event) assert error_event.wait(timeout=0.1), "No error on adding too large a message." @@ -303,7 +307,7 @@ def test_error_if_mesage_would_block_indefinitely(): error_msg = str(error_info.value) assert "would block forever" in error_msg assert "messages: 1 / 0" in error_msg - assert "bytes: {} / 1".format(msg.ByteSize()) in error_msg + assert "bytes: {} / 1".format(msg._pb.ByteSize()) in error_msg def test_threads_posting_large_messages_do_not_starve(): @@ -314,7 +318,7 @@ def test_threads_posting_large_messages_do_not_starve(): ) flow_controller = FlowController(settings) - large_msg = types.PubsubMessage(data=b"x" * 100) # close to entire byte limit + large_msg = grpc_types.PubsubMessage(data=b"x" * 100) # close to entire byte limit adding_initial_done = threading.Event() adding_large_done = threading.Event() @@ -325,43 +329,78 @@ def test_threads_posting_large_messages_do_not_starve(): # Occupy some of the flow capacity, then try to add a large message. Releasing # enough messages should eventually allow the large message to come through, even # if more messages are added after it (those should wait for the large message). - initial_messages = [types.PubsubMessage(data=b"x" * 10)] * 5 - _run_in_daemon(flow_controller, "add", initial_messages, adding_initial_done) + initial_messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 5 + _run_in_daemon(flow_controller.add, initial_messages, adding_initial_done) assert adding_initial_done.wait(timeout=0.1) - _run_in_daemon(flow_controller, "add", [large_msg], adding_large_done) + _run_in_daemon(flow_controller.add, [large_msg], adding_large_done) # Continuously keep adding more messages after the large one. - messages = [types.PubsubMessage(data=b"x" * 10)] * 10 - _run_in_daemon(flow_controller, "add", messages, adding_busy_done, action_pause=0.1) + messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 10 + _run_in_daemon(flow_controller.add, messages, adding_busy_done, action_pause=0.1) # At the same time, gradually keep releasing the messages - the freeed up # capacity should be consumed by the large message, not the other small messages # being added after it. _run_in_daemon( - flow_controller, "release", messages, releasing_busy_done, action_pause=0.1 + flow_controller.release, messages, releasing_busy_done, action_pause=0.1 ) # Sanity check - releasing should have completed by now. if not releasing_busy_done.wait(timeout=1.1): - pytest.fail("Releasing messages blocked or errored.") + pytest.fail("Releasing messages blocked or errored.") # pragma: NO COVER # Enough messages released, the large message should have come through in # the meantime. 
if not adding_large_done.wait(timeout=0.1):
-        pytest.fail("A thread adding a large message starved.")
+        pytest.fail("A thread adding a large message starved.")  # pragma: NO COVER
 
     if adding_busy_done.wait(timeout=0.1):
-        pytest.fail("Adding multiple small messages did not block.")
+        pytest.fail("Adding multiple small messages did not block.")  # pragma: NO COVER
 
     # Releasing the large message should unblock adding the remaining "busy" messages
     # that have not been added yet.
-    _run_in_daemon(flow_controller, "release", [large_msg], releasing_large_done)
+    _run_in_daemon(flow_controller.release, [large_msg], releasing_large_done)
     if not releasing_large_done.wait(timeout=0.1):
-        pytest.fail("Releasing a message blocked or errored.")
+        pytest.fail("Releasing a message blocked or errored.")  # pragma: NO COVER
 
     if not adding_busy_done.wait(timeout=1.0):
-        pytest.fail("Adding messages blocked or errored.")
+        pytest.fail("Adding messages blocked or errored.")  # pragma: NO COVER
+
+
+def test_blocked_messages_are_accepted_in_fifo_order():
+    settings = types.PublishFlowControl(
+        message_limit=1,
+        byte_limit=1_000_000,  # Unlimited for practical purposes in the test.
+        limit_exceeded_behavior=types.LimitExceededBehavior.BLOCK,
+    )
+    flow_controller = FlowController(settings)
+
+    # It's OK if the message instance is shared, as the flow controller is only
+    # concerned with byte sizes and counts, and not with particular message instances.
+    message = grpc_types.PubsubMessage(data=b"x")
+
+    adding_done_events = [threading.Event() for _ in range(10)]
+    releasing_done_events = [threading.Event() for _ in adding_done_events]
+
+    # Add messages. The first one will be accepted, and the rest should queue behind.
+    for adding_done in adding_done_events:
+        _run_in_daemon(flow_controller.add, [message], adding_done)
+        time.sleep(0.1)
+
+    if not adding_done_events[0].wait(timeout=0.1):  # pragma: NO COVER
+        pytest.fail("The first message unexpectedly got blocked on adding.")
+
+    # For each message, check that it has indeed been added to the flow controller.
+    # Then release it to make room for the next message in line, and repeat the check.
+    enumeration = enumerate(zip(adding_done_events, releasing_done_events))
+    for i, (adding_done, releasing_done) in enumeration:
+        if not adding_done.wait(timeout=0.1):  # pragma: NO COVER
+            pytest.fail(f"Queued message still blocked on adding (i={i}).")
+
+        _run_in_daemon(flow_controller.release, [message], releasing_done)
+        if not releasing_done.wait(timeout=0.1):  # pragma: NO COVER
+            pytest.fail(f"Queued message was not released in time (i={i}).")
 
 
 def test_warning_on_internal_reservation_stats_error_when_unblocking():
@@ -372,8 +411,8 @@
     )
     flow_controller = FlowController(settings)
 
-    msg1 = types.PubsubMessage(data=b"x" * 100)
-    msg2 = types.PubsubMessage(data=b"y" * 100)
+    msg1 = grpc_types.PubsubMessage(data=b"x" * 100)
+    msg2 = grpc_types.PubsubMessage(data=b"y" * 100)
 
     # If there is a concurrency bug in FlowController, we do not want to block
     # the main thread running the tests, thus we delegate all add/release
@@ -384,25 +423,27 @@
     releasing_1_done = threading.Event()
 
     # Adding a message with free capacity should not block.
- _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) + _run_in_daemon(flow_controller.add, [msg1], adding_1_done) if not adding_1_done.wait(timeout=0.1): - pytest.fail("Adding a message with enough flow capacity blocked or errored.") + pytest.fail( # pragma: NO COVER + "Adding a message with enough flow capacity blocked or errored." + ) # Adding messages when there is not enough capacity should block, even if # added through multiple threads. - _run_in_daemon(flow_controller, "add", [msg2], adding_2_done) + _run_in_daemon(flow_controller.add, [msg2], adding_2_done) if adding_2_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER # Intentionally corrupt internal stats - reservation = next(iter(flow_controller._byte_reservations.values()), None) + reservation = next(iter(flow_controller._waiting.values()), None) assert reservation is not None, "No messages blocked by flow controller." - reservation.reserved = reservation.needed + 1 + reservation.bytes_reserved = reservation.bytes_needed + 1 - with warnings.catch_warnings(record=True) as warned: - _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) + with pytest.warns(RuntimeWarning, match="Too many bytes reserved.") as warned: + _run_in_daemon(flow_controller.release, [msg1], releasing_1_done) if not releasing_1_done.wait(timeout=0.1): - pytest.fail("Releasing a message blocked or errored.") + pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER matches = [warning for warning in warned if warning.category is RuntimeWarning] assert len(matches) == 1 diff --git a/tests/unit/pubsub_v1/publisher/test_futures_publisher.py b/tests/unit/pubsub_v1/publisher/test_futures_publisher.py index eb32d0518..45bc48542 100644 --- a/tests/unit/pubsub_v1/publisher/test_futures_publisher.py +++ b/tests/unit/pubsub_v1/publisher/test_futures_publisher.py @@ -20,6 +20,14 @@ class TestFuture(object): + def test_cancel(self): + future = futures.Future() + assert future.cancel() is False + + def test_cancelled(self): + future = futures.Future() + assert future.cancelled() is False + def test_result_on_success(self): future = futures.Future() future.set_result("570307942214048") diff --git a/tests/unit/pubsub_v1/publisher/test_publish_message_wrapper.py b/tests/unit/pubsub_v1/publisher/test_publish_message_wrapper.py new file mode 100644 index 000000000..e100950ad --- /dev/null +++ b/tests/unit/pubsub_v1/publisher/test_publish_message_wrapper.py @@ -0,0 +1,55 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
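This new file tests the PublishMessageWrapper helper directly. As orientation, a minimal sketch of the wrapper surface the tests below rely on (behavior inferred from the tests themselves, not from an authoritative API reference):

    from google.pubsub_v1 import types as gapic_types
    from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import (
        PublishMessageWrapper,
    )

    # A wrapper pairs one outgoing PubsubMessage with the OpenTelemetry spans
    # recorded for it on the publish path.
    wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo"))

    # The wrapped message is a settable property...
    wrapper.message = gapic_types.PubsubMessage(data=b"bar")

    # ...and, per test_eq below, two wrappers compare equal when they wrap
    # equal messages.
    assert wrapper == PublishMessageWrapper(
        message=gapic_types.PubsubMessage(data=b"bar")
    )

    # Ending a span that was never started is a programming error, surfaced as
    # an AssertionError (see test_end_create_span below).
    try:
        wrapper.end_create_span()
    except AssertionError:
        pass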
+ +import pytest + +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + + +def test_message_setter(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + another_message = gapic_types.PubsubMessage(data=b"bar") + wrapper.message = another_message + + assert wrapper.message == another_message + + +def test_eq(): + wrapper1 = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + wrapper2 = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"bar")) + wrapper3 = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + + assert wrapper1.__eq__(wrapper2) is False + assert wrapper1.__eq__(wrapper3) is True + + +def test_end_create_span(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + with pytest.raises(AssertionError): + wrapper.end_create_span() + + +def test_end_publisher_flow_control_span(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + with pytest.raises(AssertionError): + wrapper.end_publisher_flow_control_span() + + +def test_end_publisher_batching_span(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + with pytest.raises(AssertionError): + wrapper.end_publisher_batching_span() diff --git a/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/tests/unit/pubsub_v1/publisher/test_publisher_client.py index b58ed133f..651c040ba 100644 --- a/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -15,76 +15,426 @@ from __future__ import absolute_import from __future__ import division -from google.auth import credentials +import inspect +import sys + +import grpc +import math + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock -import mock import pytest import time +from flaky import flaky +from typing import cast, Callable, Any, TypeVar + +from opentelemetry import trace +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY +from google.api_core.timeout import ConstantTimeout -from google.cloud.pubsub_v1.gapic import publisher_client from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types - from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer +from google.pubsub_v1 import types as gapic_types +from google.pubsub_v1.services.publisher import client as publisher_client +from google.pubsub_v1.services.publisher.transports.grpc import PublisherGrpcTransport +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextSetter, +) +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + + +C = TypeVar("C", bound=Callable[..., Any]) +typed_flaky = cast(Callable[[C], C], flaky(max_runs=5, min_passes=1)) + + +# Attempt to use `_thunk` to obtain the underlying grpc channel from +# the intercept channel. Default to obtaining the grpc channel directly +# for backwards compatibility. +# TODO(https://github.com/grpc/grpc/issues/38519): Workaround to obtain a channel +# until a public API is available. 
+def get_publish_channel(client): + try: + return client._transport.publish._thunk("")._channel + except AttributeError: + return client._transport.publish._channel + + +def _assert_retries_equal(retry, retry2): + # Retry instances cannot be directly compared, because their predicates are + # different instances of the same function. We thus manually compare their other + # attributes, and then heuristically compare their predicates. + for attr in ("_deadline", "_initial", "_maximum", "_multiplier"): + assert getattr(retry, attr) == getattr(retry2, attr) + + pred = retry._predicate + pred2 = retry2._predicate + assert inspect.getsource(pred) == inspect.getsource(pred2) + assert inspect.getclosurevars(pred) == inspect.getclosurevars(pred2) + + +def test_api_property_deprecated(creds): + client = publisher.Client(credentials=creds) + with pytest.warns(DeprecationWarning, match="client.api") as warned: + client.api -def test_init(): - creds = mock.Mock(spec=credentials.Credentials) + assert len(warned) == 1 + assert issubclass(warned[0].category, DeprecationWarning) + warning_msg = str(warned[0].message) + assert "client.api" in warning_msg + + +def test_api_property_proxy_to_generated_client(creds): client = publisher.Client(credentials=creds) - # A plain client should have an `api` (the underlying GAPIC) and a - # batch settings object, which should have the defaults. - assert isinstance(client.api, publisher_client.PublisherClient) + with pytest.warns(DeprecationWarning, match="client.api"): + api_object = client.api + + # Not a perfect check, but we are satisficed if the returned API object indeed + # contains all methods of the generated class. + superclass_attrs = (attr for attr in dir(type(client).__mro__[1])) + assert all( + hasattr(api_object, attr) + for attr in superclass_attrs + if callable(getattr(client, attr)) + ) + + # The resume_publish() method only exists on the hand-written wrapper class. + assert hasattr(client, "resume_publish") + assert not hasattr(api_object, "resume_publish") + + +def test_init(creds): + client = publisher.Client(credentials=creds) + + # A plain client should have a batch settings object containing the defaults. assert client.batch_settings.max_bytes == 1 * 1000 * 1000 assert client.batch_settings.max_latency == 0.01 assert client.batch_settings.max_messages == 100 -def test_init_w_custom_transport(): - transport = object() +def test_init_default_client_info(creds): + client = publisher.Client(credentials=creds) + + installed_version = publisher.client.__version__ + expected_client_info = f"gccl/{installed_version}" + + for wrapped_method in client.transport._wrapped_methods.values(): + user_agent = next( + ( + header_value + for header, header_value in wrapped_method._metadata + if header == METRICS_METADATA_KEY + ), + None, # pragma: NO COVER + ) + assert user_agent is not None + assert expected_client_info in user_agent + + +def test_init_w_custom_transport(creds): + transport = PublisherGrpcTransport(credentials=creds) client = publisher.Client(transport=transport) - # A plain client should have an `api` (the underlying GAPIC) and a - # batch settings object, which should have the defaults. - assert isinstance(client.api, publisher_client.PublisherClient) - assert client.api.transport is transport + # A plain client should have a transport and a batch settings object, which should + # contain the defaults. 
+ assert isinstance(client, publisher_client.PublisherClient) + assert client._transport is transport assert client.batch_settings.max_bytes == 1 * 1000 * 1000 assert client.batch_settings.max_latency == 0.01 assert client.batch_settings.max_messages == 100 -def test_init_w_api_endpoint(): - client_options = {"api_endpoint": "testendpoint.google.com"} - client = publisher.Client(client_options=client_options) +@pytest.mark.parametrize( + "enable_open_telemetry", + [ + True, + False, + ], +) +@typed_flaky +def test_open_telemetry_publisher_options(creds, enable_open_telemetry): + if sys.version_info >= (3, 8) or enable_open_telemetry is False: + client = publisher.Client( + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry + ), + credentials=creds, + ) + assert client._open_telemetry_enabled == enable_open_telemetry + else: + # Open Telemetry is not supported and hence disabled for Python + # versions 3.7 or below + with pytest.warns( + RuntimeWarning, + match="Open Telemetry for Python version 3.7 or lower is not supported. Disabling Open Telemetry tracing.", + ): + client = publisher.Client( + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry + ), + credentials=creds, + ) + assert client._open_telemetry_enabled is False - assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( - "utf-8" - ) == "testendpoint.google.com" +def test_opentelemetry_context_setter(): + msg = gapic_types.PubsubMessage(data=b"foo") + OpenTelemetryContextSetter().set(carrier=msg, key="key", value="bar") -def test_init_w_unicode_api_endpoint(): - client_options = {"api_endpoint": u"testendpoint.google.com"} - client = publisher.Client(client_options=client_options) + assert "googclient_key" in msg.attributes.keys() - assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( - "utf-8" - ) == "testendpoint.google.com" +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_context_propagation(creds, span_exporter): + TOPIC = "projects/projectID/topics/topicID" + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=True, + ), + ) -def test_init_w_empty_client_options(): - client = publisher.Client(client_options={}) + message_mock = mock.Mock(spec=publisher.flow_controller.FlowController.add) + client._flow_controller.add = message_mock + client.publish(TOPIC, b"data") + + message_mock.assert_called_once() + args = message_mock.call_args.args + assert len(args) == 1 + assert "googclient_traceparent" in args[0].attributes + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +@pytest.mark.parametrize( + "enable_open_telemetry", + [ + True, + False, + ], +) +def test_opentelemetry_publisher_batching_exception( + creds, span_exporter, enable_open_telemetry +): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry, + ), + ) - assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( + # Throw an exception when sequencer.publish() is called + sequencer = 
mock.Mock(spec=ordered_sequencer.OrderedSequencer) + sequencer.publish = mock.Mock(side_effect=RuntimeError("some error")) + client._get_or_create_sequencer = mock.Mock(return_value=sequencer) + + TOPIC = "projects/projectID/topics/topicID" + with pytest.raises(RuntimeError): + client.publish(TOPIC, b"message") + + spans = span_exporter.get_finished_spans() + + if enable_open_telemetry: + # Span 1: Publisher Flow Control span + # Span 2: Publisher Batching span + # Span 3: Create Publish span + assert len(spans) == 3 + + flow_control_span, batching_span, create_span = spans + + # Verify batching span contents. + assert batching_span.name == "publisher batching" + assert batching_span.kind == trace.SpanKind.INTERNAL + assert batching_span.parent.span_id == create_span.get_span_context().span_id + + # Verify exception recorded by the publisher batching span. + assert batching_span.status.status_code == trace.StatusCode.ERROR + assert len(batching_span.events) == 1 + assert batching_span.events[0].name == "exception" + + # Verify exception recorded by the publisher create span. + assert create_span.status.status_code == trace.StatusCode.ERROR + assert len(create_span.events) == 2 + assert create_span.events[0].name == "publish start" + assert create_span.events[1].name == "exception" + + # Verify the finished flow control span. + assert flow_control_span.name == "publisher flow control" + assert len(flow_control_span.events) == 0 + else: + assert len(spans) == 0 + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_flow_control_exception(creds, span_exporter): + publisher_options = types.PublisherOptions( + flow_control=types.PublishFlowControl( + message_limit=10, + byte_limit=150, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ), + enable_open_telemetry_tracing=True, + ) + client = publisher.Client(credentials=creds, publisher_options=publisher_options) + + mock_batch = mock.Mock(spec=client._batch_class) + topic = "projects/projectID/topics/topicID" + client._set_batch(topic, mock_batch) + + future1 = client.publish(topic, b"a" * 60) + future2 = client.publish(topic, b"b" * 100) + + future1.result() # no error, still within flow control limits + with pytest.raises(exceptions.FlowControlLimitError): + future2.result() + + spans = span_exporter.get_finished_spans() + + # Find the spans related to the second, failing publish call + failed_create_span = None + failed_fc_span = None + for span in spans: + if span.name == "topicID create": + if span.status.status_code == trace.StatusCode.ERROR: + failed_create_span = span + elif span.name == "publisher flow control": + if span.status.status_code == trace.StatusCode.ERROR: + failed_fc_span = span + + assert failed_create_span is not None, "Failed 'topicID create' span not found" + assert failed_fc_span is not None, "Failed 'publisher flow control' span not found" + + # Verify failed flow control span values. 
+    assert failed_fc_span.kind == trace.SpanKind.INTERNAL
+    assert (
+        failed_fc_span.parent.span_id == failed_create_span.get_span_context().span_id
+    )
+    assert len(failed_fc_span.events) == 1
+    assert failed_fc_span.events[0].name == "exception"
+
+    # Verify finished publish create span values.
+    assert failed_create_span.status.status_code == trace.StatusCode.ERROR
+    assert len(failed_create_span.events) >= 1  # Should have at least 'publish start'
+    assert failed_create_span.events[0].name == "publish start"
+    # Check for the exception event.
+    has_exception_event = any(
+        event.name == "exception" for event in failed_create_span.events
+    )
+    assert has_exception_event, "Exception event not found in failed create span"
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 8),
+    reason="Open Telemetry not supported below Python version 3.8",
+)
+def test_opentelemetry_publish(creds, span_exporter):
+    TOPIC = "projects/projectID/topics/topicID"
+    client = publisher.Client(
+        credentials=creds,
+        publisher_options=types.PublisherOptions(
+            enable_open_telemetry_tracing=True,
+        ),
+    )
+
+    client.publish(TOPIC, b"message")
+    spans = span_exporter.get_finished_spans()
+
+    # The publisher flow control and batching spans are ended in the
+    # publish() function and are deterministically expected to be in the
+    # list of exported spans. The publish create span and publish RPC span
+    # run asynchronously and end at a non-deterministic time. Hence, we only
+    # assert that at least two spans (flow control and batching) are present.
+    assert len(spans) >= 2
+    flow_control_span = None
+    batching_span = None
+    for span in spans:
+        if span.name == "publisher flow control":
+            flow_control_span = span
+            assert flow_control_span.kind == trace.SpanKind.INTERNAL
+            assert flow_control_span.parent is not None
+        if span.name == "publisher batching":
+            batching_span = span
+            assert batching_span.kind == trace.SpanKind.INTERNAL
+            assert batching_span.parent is not None
+
+    assert flow_control_span is not None
+    assert batching_span is not None
+
+
+def test_init_w_api_endpoint(creds):
+    client_options = {"api_endpoint": "testendpoint.google.com"}
+    client = publisher.Client(client_options=client_options, credentials=creds)
+
+    # Behavior to include dns prefix changed in gRPCv1.63
+    grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]]
+    if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63):
+        _EXPECTED_TARGET = "dns:///testendpoint.google.com:443"
+    else:
+        _EXPECTED_TARGET = "testendpoint.google.com:443"
+    assert (client._transport.grpc_channel._channel.target()).decode(
         "utf-8"
-    ) == publisher_client.PublisherClient.SERVICE_ADDRESS
+    ) == _EXPECTED_TARGET
+
+
+def test_init_w_empty_client_options(creds):
+    client = publisher.Client(client_options={}, credentials=creds)
+    # Behavior to include dns prefix changed in gRPCv1.63
+    grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]]
+    if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63):
+        _EXPECTED_TARGET = "dns:///pubsub.googleapis.com:443"
+    else:
+        _EXPECTED_TARGET = "pubsub.googleapis.com:443"
+    assert (client._transport.grpc_channel._channel.target()).decode(
+        "utf-8"
+    ) == _EXPECTED_TARGET
+
+
+def test_init_client_options_pass_through():
+    mock_ssl_creds = grpc.ssl_channel_credentials()
+
+    def init(self, *args, **kwargs):
+        self.kwargs = kwargs
+        self._transport = mock.Mock()
+        self._transport._host = "testendpoint.google.com"
+        self._transport._ssl_channel_credentials = mock_ssl_creds
+
+    with 
mock.patch.object(publisher_client.PublisherClient, "__init__", init): + client = publisher.Client( + client_options={ + "quota_project_id": "42", + "scopes": [], + "credentials_file": "file.json", + } + ) + client_options = client.kwargs["client_options"] + assert client_options.get("quota_project_id") == "42" + assert client_options.get("scopes") == [] + assert client_options.get("credentials_file") == "file.json" + assert client.target == "testendpoint.google.com" + assert client.transport._ssl_channel_credentials == mock_ssl_creds def test_init_emulator(monkeypatch): - monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar/") + monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar:123") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. client = publisher.Client() @@ -93,12 +443,17 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client.api.transport.publish._channel - assert channel.target().decode("utf8") == "/foo/bar/" + channel = get_publish_channel(client) + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:////foo/bar:123" + else: + _EXPECTED_TARGET = "/foo/bar:123" + assert channel.target().decode("utf8") == _EXPECTED_TARGET -def test_message_ordering_enabled(): - creds = mock.Mock(spec=credentials.Credentials) +def test_message_ordering_enabled(creds): client = publisher.Client(credentials=creds) assert not client._enable_message_ordering @@ -109,21 +464,7 @@ def test_message_ordering_enabled(): assert client._enable_message_ordering -def test_message_ordering_changes_retry_deadline(): - creds = mock.Mock(spec=credentials.Credentials) - - client = publisher.Client(credentials=creds) - assert client.api._method_configs["Publish"].retry._deadline == 60 - - client = publisher.Client( - publisher_options=types.PublisherOptions(enable_message_ordering=True), - credentials=creds, - ) - assert client.api._method_configs["Publish"].retry._deadline == 2 ** 32 / 1000 - - -def test_publish(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish(creds): client = publisher.Client(credentials=creds) future1 = mock.sentinel.future1 @@ -150,14 +491,23 @@ def test_publish(): # Check mock. 
batch.publish.assert_has_calls( [ - mock.call(types.PubsubMessage(data=b"spam")), - mock.call(types.PubsubMessage(data=b"foo", attributes={"bar": "baz"})), + mock.call( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spam"), + ) + ), + mock.call( + PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foo", attributes={"bar": "baz"} + ) + ) + ), ] ) -def test_publish_error_exceeding_flow_control_limits(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_error_exceeding_flow_control_limits(creds): publisher_options = types.PublisherOptions( flow_control=types.PublishFlowControl( message_limit=10, @@ -179,26 +529,23 @@ def test_publish_error_exceeding_flow_control_limits(): future2.result() -def test_publish_data_not_bytestring_error(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_data_not_bytestring_error(creds): client = publisher.Client(credentials=creds) topic = "topic/path" with pytest.raises(TypeError): - client.publish(topic, u"This is a text string.") + client.publish(topic, "This is a text string.") with pytest.raises(TypeError): client.publish(topic, 42) -def test_publish_message_ordering_not_enabled_error(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_message_ordering_not_enabled_error(creds): client = publisher.Client(credentials=creds) topic = "topic/path" with pytest.raises(ValueError): client.publish(topic, b"bytestring body", ordering_key="ABC") -def test_publish_empty_ordering_key_when_message_ordering_enabled(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_empty_ordering_key_when_message_ordering_enabled(creds): client = publisher.Client( publisher_options=types.PublisherOptions(enable_message_ordering=True), credentials=creds, @@ -207,8 +554,76 @@ def test_publish_empty_ordering_key_when_message_ordering_enabled(): assert client.publish(topic, b"bytestring body", ordering_key="") is not None -def test_publish_attrs_bytestring(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_with_ordering_key_uses_extended_retry_deadline(creds): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(enable_message_ordering=True), + ) + + # Use mocks in lieu of the actual batch class. + batch = mock.Mock(spec=client._batch_class) + future = mock.sentinel.future + future.add_done_callback = mock.Mock(spec=["__call__"]) + batch.publish.return_value = future + + topic = "topic/path" + client._set_batch(topic, batch) + + # Actually mock the batch class now. + batch_class = mock.Mock(spec=(), return_value=batch) + client._set_batch_class(batch_class) + + # Publish a message with custom retry settings. + custom_retry = retries.Retry( + initial=1, + maximum=20, + multiplier=3.3, + deadline=999, + predicate=retries.if_exception_type(TimeoutError, KeyboardInterrupt), + ) + future = client.publish(topic, b"foo", ordering_key="first", retry=custom_retry) + assert future is mock.sentinel.future + + # Check the retry settings used for the batch. 
+ batch_class.assert_called_once() + _, kwargs = batch_class.call_args + + batch_commit_retry = kwargs["commit_retry"] + expected_retry = custom_retry.with_deadline(2.0**32) + _assert_retries_equal(batch_commit_retry, expected_retry) + + batch_commit_timeout = kwargs["commit_timeout"] + expected_timeout = 2.0**32 + assert batch_commit_timeout == pytest.approx(expected_timeout) + + +def test_publish_with_ordering_key_with_no_retry(creds): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(enable_message_ordering=True), + ) + + # Use mocks in lieu of the actual batch class. + batch = mock.Mock(spec=client._batch_class) + future = mock.sentinel.future + future.add_done_callback = mock.Mock(spec=["__call__"]) + batch.publish.return_value = future + + topic = "topic/path" + client._set_batch(topic, batch) + + # Actually mock the batch class now. + batch_class = mock.Mock(spec=(), return_value=batch) + client._set_batch_class(batch_class) + + future = client.publish(topic, b"foo", ordering_key="first", retry=None) + assert future is mock.sentinel.future + + # Check the retry settings used for the batch. + batch_class.assert_called_once() + + +def test_publish_attrs_bytestring(creds): client = publisher.Client(credentials=creds) # Use a mock in lieu of the actual batch class. @@ -225,12 +640,13 @@ def test_publish_attrs_bytestring(): # The attributes should have been sent as text. batch.publish.assert_called_once_with( - types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ) ) -def test_publish_new_batch_needed(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_new_batch_needed(creds): client = publisher.Client(credentials=creds) # Use mocks in lieu of the actual batch class. @@ -255,6 +671,8 @@ def test_publish_new_batch_needed(): future = client.publish(topic, b"foo", bar=b"baz") assert future is mock.sentinel.future + call_args = batch_class.call_args + # Check the mocks. 
batch_class.assert_called_once_with( client=mock.ANY, @@ -262,22 +680,65 @@ def test_publish_new_batch_needed(): settings=client.batch_settings, batch_done_callback=None, commit_when_full=True, + commit_retry=gapic_v1.method.DEFAULT, + commit_timeout=mock.ANY, ) - message_pb = types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) - batch1.publish.assert_called_once_with(message_pb) - batch2.publish.assert_called_once_with(message_pb) + commit_timeout_arg = call_args[1]["commit_timeout"] + assert isinstance(commit_timeout_arg, ConstantTimeout) + assert math.isclose(commit_timeout_arg._timeout, 60) is True + + message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + wrapper = PublishMessageWrapper(message=message_pb) + batch1.publish.assert_called_once_with(wrapper) + batch2.publish.assert_called_once_with(wrapper) -def test_publish_attrs_type_error(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_attrs_type_error(creds): client = publisher.Client(credentials=creds) topic = "topic/path" with pytest.raises(TypeError): client.publish(topic, b"foo", answer=42) -def test_stop(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_custom_retry_overrides_configured_retry(creds): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(retry=mock.sentinel.publish_retry), + ) + + topic = "topic/path" + client._flow_controller = mock.Mock() + fake_sequencer = mock.Mock() + client._get_or_create_sequencer = mock.Mock(return_value=fake_sequencer) + client.publish(topic, b"hello!", retry=mock.sentinel.custom_retry) + + fake_sequencer.publish.assert_called_once_with( + wrapper=mock.ANY, retry=mock.sentinel.custom_retry, timeout=mock.ANY + ) + message = fake_sequencer.publish.call_args.kwargs["wrapper"].message + assert message.data == b"hello!" + + +def test_publish_custom_timeout_overrides_configured_timeout(creds): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(timeout=mock.sentinel.publish_timeout), + ) + + topic = "topic/path" + client._flow_controller = mock.Mock() + fake_sequencer = mock.Mock() + client._get_or_create_sequencer = mock.Mock(return_value=fake_sequencer) + client.publish(topic, b"hello!", timeout=mock.sentinel.custom_timeout) + + fake_sequencer.publish.assert_called_once_with( + wrapper=mock.ANY, retry=mock.ANY, timeout=mock.sentinel.custom_timeout + ) + message = fake_sequencer.publish.call_args.kwargs["wrapper"].message + assert message.data == b"hello!" 
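The two tests above pin down the precedence rule for publish settings: a retry or timeout passed to publish() overrides the corresponding values configured via PublisherOptions. A minimal caller-side sketch of that behavior, assuming application default credentials and placeholder project/topic names (not taken from this patch):

    from google.api_core import retry as retries
    from google.cloud import pubsub_v1

    # Client-wide defaults; these apply to every publish() call...
    client = pubsub_v1.PublisherClient(
        publisher_options=pubsub_v1.types.PublisherOptions(
            retry=retries.Retry(deadline=60),
            timeout=60.0,
        )
    )
    topic = client.topic_path("my-project", "my-topic")  # placeholder names

    # ...unless overridden per call, which is what these tests verify.
    future = client.publish(
        topic,
        b"payload",
        retry=retries.Retry(initial=1, maximum=10, deadline=300),
        timeout=30.0,
    )
    print(future.result())  # message ID assigned by the service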
+ + +def test_stop(creds): client = publisher.Client(credentials=creds) batch1 = mock.Mock(spec=client._batch_class) @@ -298,17 +759,16 @@ def test_stop(): client.stop() -def test_gapic_instance_method(): - creds = mock.Mock(spec=credentials.Credentials) +def test_gapic_instance_method(creds): client = publisher.Client(credentials=creds) - ct = mock.Mock() - client.api._inner_api_calls["create_topic"] = ct + topic = gapic_types.Topic(name="projects/foo/topics/bar") + with mock.patch.object(client, "create_topic") as patched: + client.create_topic(topic) - client.create_topic("projects/foo/topics/bar") - assert ct.call_count == 1 - _, args, _ = ct.mock_calls[0] - assert args[0] == types.Topic(name="projects/foo/topics/bar") + assert patched.call_count == 1 + _, args, _ = patched.mock_calls[0] + assert args[0] == gapic_types.Topic(name="projects/foo/topics/bar") def test_gapic_class_method_on_class(): @@ -327,15 +787,13 @@ def test_class_method_factory(): assert isinstance(client, publisher.Client) -def test_gapic_class_method_on_instance(): - creds = mock.Mock(spec=credentials.Credentials) +def test_gapic_class_method_on_instance(creds): client = publisher.Client(credentials=creds) answer = client.topic_path("foo", "bar") assert answer == "projects/foo/topics/bar" -def test_commit_thread_created_on_publish(): - creds = mock.Mock(spec=credentials.Credentials) +def test_commit_thread_created_on_publish(creds): # Max latency is not infinite so a commit thread is created. batch_settings = types.BatchSettings(max_latency=600) client = publisher.Client(batch_settings=batch_settings, credentials=creds) @@ -358,8 +816,7 @@ def test_commit_thread_created_on_publish(): _start_commit_thread.assert_called_once() -def test_commit_thread_not_created_on_publish_if_max_latency_is_inf(): - creds = mock.Mock(spec=credentials.Credentials) +def test_commit_thread_not_created_on_publish_if_max_latency_is_inf(creds): # Max latency is infinite so a commit thread is not created. batch_settings = types.BatchSettings(max_latency=float("inf")) client = publisher.Client(batch_settings=batch_settings, credentials=creds) @@ -368,8 +825,7 @@ def test_commit_thread_not_created_on_publish_if_max_latency_is_inf(): assert client._commit_thread is None -def test_wait_and_commit_sequencers(): - creds = mock.Mock(spec=credentials.Credentials) +def test_wait_and_commit_sequencers(creds): # Max latency is infinite so a commit thread is not created. # We don't want a commit thread to interfere with this test. batch_settings = types.BatchSettings(max_latency=float("inf")) @@ -377,9 +833,7 @@ def test_wait_and_commit_sequencers(): # Mock out time so no sleep is actually done. with mock.patch.object(time, "sleep"): - with mock.patch.object( - publisher.Client, "_commit_sequencers" - ) as _commit_sequencers: + with mock.patch.object(client, "_commit_sequencers") as _commit_sequencers: assert ( client.publish("topic", b"bytestring body", ordering_key="") is not None ) @@ -389,8 +843,7 @@ def test_wait_and_commit_sequencers(): assert _commit_sequencers.call_count == 1 -def test_stopped_client_does_not_commit_sequencers(): - creds = mock.Mock(spec=credentials.Credentials) +def test_stopped_client_does_not_commit_sequencers(creds): # Max latency is infinite so a commit thread is not created. # We don't want a commit thread to interfere with this test. batch_settings = types.BatchSettings(max_latency=float("inf")) @@ -398,9 +851,7 @@ def test_stopped_client_does_not_commit_sequencers(): # Mock out time so no sleep is actually done. 
with mock.patch.object(time, "sleep"): - with mock.patch.object( - publisher.Client, "_commit_sequencers" - ) as _commit_sequencers: + with mock.patch.object(client, "_commit_sequencers") as _commit_sequencers: assert ( client.publish("topic", b"bytestring body", ordering_key="") is not None ) @@ -414,8 +865,7 @@ def test_stopped_client_does_not_commit_sequencers(): assert _commit_sequencers.call_count == 0 -def test_publish_with_ordering_key(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_with_ordering_key(creds): publisher_options = types.PublisherOptions(enable_message_ordering=True) client = publisher.Client(publisher_options=publisher_options, credentials=creds) @@ -444,18 +894,23 @@ def test_publish_with_ordering_key(): # Check mock. batch.publish.assert_has_calls( [ - mock.call(types.PubsubMessage(data=b"spam", ordering_key="k1")), mock.call( - types.PubsubMessage( - data=b"foo", attributes={"bar": "baz"}, ordering_key="k1" + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spam", ordering_key="k1") + ), + ), + mock.call( + PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foo", attributes={"bar": "baz"}, ordering_key="k1" + ) ) ), ] ) -def test_ordered_sequencer_cleaned_up(): - creds = mock.Mock(spec=credentials.Credentials) +def test_ordered_sequencer_cleaned_up(creds): # Max latency is infinite so a commit thread is not created. # We don't want a commit thread to interfere with this test. batch_settings = types.BatchSettings(max_latency=float("inf")) @@ -483,8 +938,7 @@ def test_ordered_sequencer_cleaned_up(): assert len(client._sequencers) == 0 -def test_resume_publish(): - creds = mock.Mock(spec=credentials.Credentials) +def test_resume_publish(creds): publisher_options = types.PublisherOptions(enable_message_ordering=True) client = publisher.Client(publisher_options=publisher_options, credentials=creds) @@ -494,11 +948,10 @@ def test_resume_publish(): client._set_sequencer(topic=topic, sequencer=sequencer, ordering_key=ordering_key) client.resume_publish(topic, ordering_key) - assert sequencer.unpause.called_once() + sequencer.unpause.assert_called_once() -def test_resume_publish_no_sequencer_found(): - creds = mock.Mock(spec=credentials.Credentials) +def test_resume_publish_no_sequencer_found(creds): publisher_options = types.PublisherOptions(enable_message_ordering=True) client = publisher.Client(publisher_options=publisher_options, credentials=creds) @@ -507,8 +960,7 @@ def test_resume_publish_no_sequencer_found(): client.resume_publish("topic", "ord_key") -def test_resume_publish_ordering_keys_not_enabled(): - creds = mock.Mock(spec=credentials.Credentials) +def test_resume_publish_ordering_keys_not_enabled(creds): publisher_options = types.PublisherOptions(enable_message_ordering=False) client = publisher.Client(publisher_options=publisher_options, credentials=creds) diff --git a/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 43822e96e..5483c48c5 100644 --- a/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -13,30 +13,45 @@ # limitations under the License. 
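The dispatcher tests that follow revolve around one behavior of dispatch_callback: incoming request items are grouped by request type, duplicates of the same ack_id within a group are collapsed (the first item wins, and a duplicate's future is completed immediately, successfully when exactly-once delivery is off and with an error when it is on), and each group is handed to the matching handler (ack, nack, drop, lease, modify_ack_deadline). A rough standalone sketch of the grouping step, not the library's exact implementation:

import collections

def group_requests(items):
    """Group request items by class, keeping the first item per ack_id."""
    grouped = collections.defaultdict(dict)
    for item in items:
        # setdefault keeps the first occurrence of each ack_id.
        grouped[type(item)].setdefault(item.ack_id, item)
    return {
        request_type: list(deduplicated.values())
        for request_type, deduplicated in grouped.items()
    }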
import collections +import queue +import sys import threading -from google.cloud.pubsub_v1 import types +from opentelemetry import trace + from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.cloud.pubsub_v1.subscriber import futures +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) +from google.pubsub_v1.types import PubsubMessage + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock -import mock -from six.moves import queue import pytest +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeStatus, +) @pytest.mark.parametrize( "item,method_name", [ - (requests.AckRequest(0, 0, 0, ""), "ack"), - (requests.DropRequest(0, 0, ""), "drop"), - (requests.LeaseRequest(0, 0, ""), "lease"), - (requests.ModAckRequest(0, 0), "modify_ack_deadline"), - (requests.NackRequest(0, 0, ""), "nack"), + (requests.AckRequest("0", 0, 0, "", None), "ack"), + (requests.DropRequest("0", 0, ""), "drop"), + (requests.LeaseRequest("0", 0, ""), "lease"), + (requests.ModAckRequest("0", 0, None), "modify_ack_deadline"), + (requests.NackRequest("0", 0, "", None), "nack"), ], ) -def test_dispatch_callback(item, method_name): +def test_dispatch_callback_active_manager(item, method_name): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -48,18 +63,431 @@ def test_dispatch_callback(item, method_name): dispatcher_.dispatch_callback(items) method.assert_called_once_with([item]) + manager._exactly_once_delivery_enabled.assert_called() -def test_dispatch_callback_inactive(): +@pytest.mark.parametrize( + "item,method_name", + [ + (requests.AckRequest("0", 0, 0, "", None), "ack"), + (requests.DropRequest("0", 0, ""), "drop"), + (requests.LeaseRequest("0", 0, ""), "lease"), + (requests.ModAckRequest("0", 0, None), "modify_ack_deadline"), + (requests.NackRequest("0", 0, "", None), "nack"), + ], +) +def test_dispatch_callback_inactive_manager(item, method_name): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0, "")]) + items = [item] + + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([item]) + manager._exactly_once_delivery_enabled.assert_called() + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", None), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, ""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, None), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", None), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_no_futures( + items, method_name +): + manager = mock.create_autospec( + 
streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = False + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", futures.Future()), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, ""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, futures.Future()), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", futures.Future()), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_with_futures_no_eod( + items, method_name +): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = False + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() + + if method_name != "drop" and method_name != "lease": + assert items[1].future.result() == AcknowledgeStatus.SUCCESS + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", futures.Future()), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, ""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, futures.Future()), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", futures.Future()), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_with_futures_eod( + items, method_name +): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = True + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() + + if method_name != "drop" and method_name != "lease": + with pytest.raises(ValueError) as err: + items[1].future.result() + assert err.errisinstance(ValueError) + + +def test_dispatch_duplicate_items_diff_types_callback_active_manager_with_futures_eod(): + ack_future = futures.Future() + ack_request = requests.AckRequest("0", 0, 1, "", ack_future) + drop_request = requests.DropRequest("0", 1, "") + lease_request = requests.LeaseRequest("0", 1, "") + nack_future = futures.Future() + nack_request = requests.NackRequest("0", 1, "", nack_future) + modack_future = futures.Future() + modack_request = requests.ModAckRequest("0", 1, 
modack_future) + + items = [ack_request, drop_request, lease_request, nack_request, modack_request] + + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = True + with mock.patch.multiple( + dispatcher_, + ack=mock.DEFAULT, + nack=mock.DEFAULT, + drop=mock.DEFAULT, + lease=mock.DEFAULT, + modify_ack_deadline=mock.DEFAULT, + ): + dispatcher_.dispatch_callback(items) + manager._exactly_once_delivery_enabled.assert_called() + dispatcher_.ack.assert_called_once_with([ack_request]) + dispatcher_.drop.assert_called_once_with([drop_request]) + dispatcher_.lease.assert_called_once_with([lease_request]) + dispatcher_.nack.assert_called_once_with([nack_request]) + dispatcher_.modify_ack_deadline.assert_called_once_with([modack_request]) + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", None), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, ""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, None), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", None), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_no_futures_eod( + items, method_name +): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = True + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() + + +def test_unknown_request_type(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + items = ["a random string, not a known request type"] + manager.send_unary_ack.return_value = (items, []) + with pytest.warns(RuntimeWarning, match="Skipping unknown request item of type"): + dispatcher_.dispatch_callback(items) + + +def test_opentelemetry_modify_ack_deadline(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + opentelemetry_data = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=60, + future=None, + opentelemetry_data=opentelemetry_data, + ) + ] + manager.send_unary_modack.return_value = (items, []) + dispatcher_.modify_ack_deadline(items) + + # Subscribe span would not have ended as part of a modack. So, end it + # in the test, so that we can export and assert its contents. 
+ opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + subscribe_span = spans[0] + + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "modack start" + assert subscribe_span.events[1].name == "modack end" - manager.send.assert_not_called() + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_ack(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + data2 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + items = [ + requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + opentelemetry_data=data1, + ), + requests.AckRequest( + ack_id="ack_id_string2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + opentelemetry_data=data2, + ), + ] + manager.send_unary_ack.return_value = (items, []) + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_.ack(items) + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 3 + ack_span = spans[0] + + for subscribe_span in spans[1:]: + assert subscribe_span.attributes["messaging.gcp_pubsub.result"] == "acked" + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "ack start" + assert subscribe_span.events[1].name == "ack end" + + # This subscribe span is sampled, so we expect it to be linked to the ack + # span. 
+ assert len(spans[1].links) == 1 + assert spans[1].links[0].context == ack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "ack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the ack span + assert len(spans[2].links) == 0 + + assert ack_span.name == "subscriptionID ack" + assert ack_span.kind == trace.SpanKind.CLIENT + assert ack_span.parent is None + assert len(ack_span.links) == 1 + assert ack_span.attributes["messaging.system"] == "gcp_pubsub" + assert ack_span.attributes["messaging.batch.message_count"] == 2 + assert ack_span.attributes["messaging.operation"] == "ack" + assert ack_span.attributes["gcp.project_id"] == "projectID" + assert ack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert ack_span.attributes["code.function"] == "ack" def test_ack(): @@ -70,13 +498,18 @@ def test_ack(): items = [ requests.AckRequest( - ack_id="ack_id_string", byte_size=0, time_to_ack=20, ordering_key="" + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, ) ] + manager.send_unary_ack.return_value = (items, []) dispatcher_.ack(items) - manager.send.assert_called_once_with( - types.StreamingPullRequest(ack_ids=["ack_id_string"]) + manager.send_unary_ack.assert_called_once_with( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} ) manager.leaser.remove.assert_called_once_with(items) @@ -92,13 +525,18 @@ def test_ack_no_time(): items = [ requests.AckRequest( - ack_id="ack_id_string", byte_size=0, time_to_ack=None, ordering_key="" + ack_id="ack_id_string", + byte_size=0, + time_to_ack=None, + ordering_key="", + future=None, ) ] + manager.send_unary_ack.return_value = (items, []) dispatcher_.ack(items) - manager.send.assert_called_once_with( - types.StreamingPullRequest(ack_ids=["ack_id_string"]) + manager.send_unary_ack.assert_called_once_with( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} ) manager.ack_histogram.add.assert_not_called() @@ -113,27 +551,369 @@ def test_ack_splitting_large_payload(): items = [ # use realistic lengths for ACK IDs (max 176 bytes) requests.AckRequest( - ack_id=str(i).zfill(176), byte_size=0, time_to_ack=20, ordering_key="" + ack_id=str(i).zfill(176), + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, ) for i in range(5001) ] + manager.send_unary_ack.return_value = (items, []) dispatcher_.ack(items) - calls = manager.send.call_args_list - assert len(calls) == 3 + calls = manager.send_unary_ack.call_args_list + assert len(calls) == 6 all_ack_ids = {item.ack_id for item in items} sent_ack_ids = collections.Counter() for call in calls: - message = call.args[0] - assert message.ByteSize() <= 524288 # server-side limit (2**19) - sent_ack_ids.update(message.ack_ids) + ack_ids = call[1]["ack_ids"] + assert len(ack_ids) <= dispatcher._ACK_IDS_BATCH_SIZE + sent_ack_ids.update(ack_ids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been ACK-ed assert sent_ack_ids.most_common(1)[0][1] == 1 # each message ACK-ed exactly once +def test_retry_acks_in_new_thread(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [ + requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + ) + ] + # failure triggers creation of new retry thread + 
manager.send_unary_ack.side_effect = [([], items)] + with mock.patch("time.sleep", return_value=None): + with mock.patch.object(threading, "Thread", autospec=True) as Thread: + dispatcher_.ack(items) + + assert len(Thread.mock_calls) == 2 + ctor_call = Thread.mock_calls[0] + assert ctor_call.kwargs["name"] == "Thread-RetryAcks" + assert ctor_call.kwargs["target"].args[0] == items + assert ctor_call.kwargs["daemon"] + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_retry_acks(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + data2 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + + f = futures.Future() + items = [ + requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + opentelemetry_data=data1, + ), + requests.AckRequest( + ack_id="ack_id_string2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + opentelemetry_data=data2, + ), + ] + manager.send_unary_ack.side_effect = [(items, [])] + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch("time.sleep", return_value=None): + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_._retry_acks(items) + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 3 + ack_span = spans[0] + + for subscribe_span in spans[1:]: + assert "messaging.gcp_pubsub.result" in subscribe_span.attributes + assert subscribe_span.attributes["messaging.gcp_pubsub.result"] == "acked" + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "ack start" + assert subscribe_span.events[1].name == "ack end" + + # This subscribe span is sampled, so we expect it to be linked to the ack + # span. 
+ assert len(spans[1].links) == 1 + assert spans[1].links[0].context == ack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "ack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the ack span + assert len(spans[2].links) == 0 + + assert ack_span.name == "subscriptionID ack" + assert ack_span.kind == trace.SpanKind.CLIENT + assert ack_span.parent is None + assert len(ack_span.links) == 1 + assert ack_span.attributes["messaging.system"] == "gcp_pubsub" + assert ack_span.attributes["messaging.batch.message_count"] == 2 + assert ack_span.attributes["messaging.operation"] == "ack" + assert ack_span.attributes["gcp.project_id"] == "projectID" + assert ack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert ack_span.attributes["code.function"] == "ack" + + +def test_retry_acks(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [ + requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + ) + ] + # first and second `send_unary_ack` calls fail, third one succeeds + manager.send_unary_ack.side_effect = [([], items), ([], items), (items, [])] + with mock.patch("time.sleep", return_value=None): + dispatcher_._retry_acks(items) + + manager.send_unary_ack.assert_has_calls( + [ + mock.call( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} + ), + mock.call( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} + ), + mock.call( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} + ), + ] + ) + + +def test_retry_modacks_in_new_thread(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=20, + future=f, + ) + ] + # failure triggers creation of new retry thread + manager.send_unary_modack.side_effect = [([], items)] + with mock.patch("time.sleep", return_value=None): + with mock.patch.object(threading, "Thread", autospec=True) as Thread: + dispatcher_.modify_ack_deadline(items) + + assert len(Thread.mock_calls) == 2 + ctor_call = Thread.mock_calls[0] + assert ctor_call.kwargs["name"] == "Thread-RetryModAcks" + assert ctor_call.kwargs["target"].args[0] == items + assert ctor_call.kwargs["daemon"] + + +def test_opentelemetry_retry_modacks(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + opentelemetry_data = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + + f = futures.Future() + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=20, + future=f, + opentelemetry_data=opentelemetry_data, + ) + ] + manager.send_unary_modack.side_effect = [(items, [])] + with mock.patch("time.sleep", return_value=None): + dispatcher_._retry_modacks(items) + + # Subscribe span wouldn't be ended for modacks. 
So, end it in the test, so + # that we can export and assert its contents. + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + subscribe_span = spans[0] + + assert len(subscribe_span.events) == 1 + assert subscribe_span.events[0].name == "modack end" + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_retry_nacks(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id1", + delivery_attempt=5, + ) + data2 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id2", + delivery_attempt=5, + ) + + f = futures.Future() + items = [ + requests.ModAckRequest( + ack_id="ack_id1", + seconds=0, + future=f, + opentelemetry_data=data1, + ), + requests.ModAckRequest( + ack_id="ack_id2", + seconds=0, + future=f, + opentelemetry_data=data2, + ), + ] + manager.send_unary_modack.side_effect = [(items, [])] + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch("time.sleep", return_value=None): + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_._retry_modacks(items) + + spans = span_exporter.get_finished_spans() + assert len(spans) == 3 + nack_span = spans[0] + + for subscribe_span in spans[1:]: + assert "messaging.gcp_pubsub.result" in subscribe_span.attributes + assert subscribe_span.attributes["messaging.gcp_pubsub.result"] == "nacked" + assert len(subscribe_span.events) == 1 + assert subscribe_span.events[0].name == "nack end" + + # This subscribe span is sampled, so we expect it to be linked to the nack + # span. 
+ assert len(spans[1].links) == 1 + assert spans[1].links[0].context == nack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "nack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the nack span + assert len(spans[2].links) == 0 + + assert nack_span.name == "subscriptionID nack" + assert nack_span.kind == trace.SpanKind.CLIENT + assert nack_span.parent is None + assert len(nack_span.links) == 1 + assert nack_span.attributes["messaging.system"] == "gcp_pubsub" + assert nack_span.attributes["messaging.batch.message_count"] == 2 + assert nack_span.attributes["messaging.operation"] == "nack" + assert nack_span.attributes["gcp.project_id"] == "projectID" + assert nack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert nack_span.attributes["code.function"] == "modify_ack_deadline" + + +def test_retry_modacks(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=20, + future=f, + ) + ] + # first and second calls fail, third one succeeds + manager.send_unary_modack.side_effect = [([], items), ([], items), (items, [])] + with mock.patch("time.sleep", return_value=None): + dispatcher_._retry_modacks(items) + + manager.send_unary_modack.assert_has_calls( + [ + mock.call( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[20], + ack_reqs_dict={"ack_id_string": items[0]}, + ), + mock.call( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[20], + ack_reqs_dict={"ack_id_string": items[0]}, + ), + mock.call( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[20], + ack_reqs_dict={"ack_id_string": items[0]}, + ), + ] + ) + + def test_lease(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True @@ -183,6 +963,103 @@ def test_drop_ordered_messages(): manager.maybe_resume_consumer.assert_called_once() +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_nack(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + data2 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id2", + delivery_attempt=5, + ) + + items = [ + requests.NackRequest( + ack_id="ack_id", + byte_size=10, + ordering_key="", + future=None, + opentelemetry_data=data1, + ), + requests.NackRequest( + ack_id="ack_id2", + byte_size=10, + ordering_key="", + future=None, + opentelemetry_data=data2, + ), + ] + response_items = [ + requests.ModAckRequest( + ack_id="ack_id", + seconds=0, + future=None, + opentelemetry_data=data1, + ), + requests.ModAckRequest( + ack_id="ack_id2", + seconds=0, + future=None, + opentelemetry_data=data2, + ), + ] + manager.send_unary_modack.return_value = (response_items, []) + 
+ mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_.nack(items) + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 3 + nack_span = spans[0] + for subscribe_span in spans[1:]: + assert "messaging.gcp_pubsub.result" in subscribe_span.attributes + assert subscribe_span.attributes["messaging.gcp_pubsub.result"] == "nacked" + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "nack start" + assert subscribe_span.events[1].name == "nack end" + + # This subscribe span is sampled, so we expect it to be linked to the nack + # span. + assert len(spans[1].links) == 1 + assert spans[1].links[0].context == nack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "nack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the nack span + assert len(spans[2].links) == 0 + + assert nack_span.name == "subscriptionID nack" + assert nack_span.kind == trace.SpanKind.CLIENT + assert nack_span.parent is None + assert len(nack_span.links) == 1 + assert nack_span.attributes["messaging.system"] == "gcp_pubsub" + assert nack_span.attributes["messaging.batch.message_count"] == 2 + assert nack_span.attributes["messaging.operation"] == "nack" + assert nack_span.attributes["gcp.project_id"] == "projectID" + assert nack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert nack_span.attributes["code.function"] == "modify_ack_deadline" + + def test_nack(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True @@ -190,15 +1067,26 @@ def test_nack(): dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) items = [ - requests.NackRequest(ack_id="ack_id_string", byte_size=10, ordering_key="") + requests.NackRequest( + ack_id="ack_id_string", byte_size=10, ordering_key="", future=None + ) ] + manager.send_unary_modack.return_value = (items, []) dispatcher_.nack(items) + calls = manager.send_unary_modack.call_args_list + assert len(calls) == 1 - manager.send.assert_called_once_with( - types.StreamingPullRequest( - modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[0] - ) - ) + for call in calls: + modify_deadline_ack_ids = call[1]["modify_deadline_ack_ids"] + assert list(modify_deadline_ack_ids) == ["ack_id_string"] + modify_deadline_seconds = call[1]["modify_deadline_seconds"] + assert list(modify_deadline_seconds) == [0] + ack_reqs_dict = call[1]["ack_reqs_dict"] + assert ack_reqs_dict == { + "ack_id_string": requests.ModAckRequest( + ack_id="ack_id_string", seconds=0, future=None + ) + } def test_modify_ack_deadline(): @@ -207,14 +1095,19 @@ def test_modify_ack_deadline(): ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=60)] + items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=60, future=None)] + manager.send_unary_modack.return_value = (items, []) dispatcher_.modify_ack_deadline(items) + calls = manager.send_unary_modack.call_args_list + assert len(calls) == 1 - manager.send.assert_called_once_with( - types.StreamingPullRequest( - modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[60] - ) - ) + for call in calls: + modify_deadline_ack_ids = call[1]["modify_deadline_ack_ids"] + assert 
list(modify_deadline_ack_ids) == ["ack_id_string"] + modify_deadline_seconds = call[1]["modify_deadline_seconds"] + assert list(modify_deadline_seconds) == [60] + ack_reqs_dict = call[1]["ack_reqs_dict"] + assert ack_reqs_dict == {"ack_id_string": items[0]} def test_modify_ack_deadline_splitting_large_payload(): @@ -225,21 +1118,55 @@ def test_modify_ack_deadline_splitting_large_payload(): items = [ # use realistic lengths for ACK IDs (max 176 bytes) - requests.ModAckRequest(ack_id=str(i).zfill(176), seconds=60) + requests.ModAckRequest(ack_id=str(i).zfill(176), seconds=60, future=None) for i in range(5001) ] + manager.send_unary_modack.return_value = (items, []) dispatcher_.modify_ack_deadline(items) - calls = manager.send.call_args_list - assert len(calls) == 3 + calls = manager.send_unary_modack.call_args_list + assert len(calls) == 6 + + all_ack_ids = {item.ack_id for item in items} + sent_ack_ids = collections.Counter() + + for call in calls: + modack_ackids = list(call[1]["modify_deadline_ack_ids"]) + assert len(modack_ackids) <= dispatcher._ACK_IDS_BATCH_SIZE + sent_ack_ids.update(modack_ackids) + + assert set(sent_ack_ids) == all_ack_ids # all messages should have been MODACK-ed + assert sent_ack_ids.most_common(1)[0][1] == 1 # each message MODACK-ed exactly once + + +def test_modify_ack_deadline_splitting_large_payload_with_default_deadline(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + items = [ + # use realistic lengths for ACK IDs (max 176 bytes) + requests.ModAckRequest(ack_id=str(i).zfill(176), seconds=60, future=None) + for i in range(5001) + ] + manager.send_unary_modack.return_value = (items, []) + dispatcher_.modify_ack_deadline(items, 60) + + calls = manager.send_unary_modack.call_args_list + assert len(calls) == 6 all_ack_ids = {item.ack_id for item in items} sent_ack_ids = collections.Counter() for call in calls: - message = call.args[0] - assert message.ByteSize() <= 524288 # server-side limit (2**19) - sent_ack_ids.update(message.modify_deadline_ack_ids) + modack_ackids = list(call[1]["modify_deadline_ack_ids"]) + modack_deadline_seconds = call[1]["modify_deadline_seconds"] + default_deadline = call[1]["default_deadline"] + assert len(list(modack_ackids)) <= dispatcher._ACK_IDS_BATCH_SIZE + assert modack_deadline_seconds is None + assert default_deadline == 60 + sent_ack_ids.update(modack_ackids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been MODACK-ed assert sent_ack_ids.most_common(1)[0][1] == 1 # each message MODACK-ed exactly once diff --git a/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 909337cc8..d10da6fb1 100644 --- a/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -13,12 +13,22 @@ # limitations under the License. 
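The splitting tests above assert two invariants: no single unary ack/modack call carries more than dispatcher._ACK_IDS_BATCH_SIZE ack IDs, and every ID is sent exactly once. A standalone sketch of that chunking; the cap of 1000 is inferred from 5001 items yielding 6 calls and is illustrative, not read from the library:

_ACK_IDS_BATCH_SIZE = 1000  # illustrative; the real constant lives in dispatcher.py

def chunked(ack_ids, size=_ACK_IDS_BATCH_SIZE):
    """Yield successive slices of at most `size` ack IDs."""
    for start in range(0, len(ack_ids), size):
        yield ack_ids[start : start + size]

ack_ids = [str(i).zfill(176) for i in range(5001)]
batches = list(chunked(ack_ids))

assert len(batches) == 6
assert all(len(batch) <= _ACK_IDS_BATCH_SIZE for batch in batches)
assert sum(len(batch) for batch in batches) == 5001  # each ID appears once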
from __future__ import absolute_import +import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock -import mock import pytest from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeError, + AcknowledgeStatus, +) class TestStreamingPullFuture(object): @@ -31,13 +41,12 @@ def make_future(self): def test_default_state(self): future = self.make_future() + manager = future._StreamingPullFuture__manager assert future.running() assert not future.done() assert not future.cancelled() - future._manager.add_close_callback.assert_called_once_with( - future._on_close_callback - ) + manager.add_close_callback.assert_called_once_with(future._on_close_callback) def test__on_close_callback_success(self): future = self.make_future() @@ -71,8 +80,36 @@ def test__on_close_callback_future_already_done(self): def test_cancel(self): future = self.make_future() + manager = future._StreamingPullFuture__manager future.cancel() - future._manager.close.assert_called_once() + manager.close.assert_called_once() assert future.cancelled() + + +class TestFuture(object): + def test_cancel(self): + future = futures.Future() + assert future.cancel() is False + + def test_cancelled(self): + future = futures.Future() + assert future.cancelled() is False + + def test_result_on_success(self): + future = futures.Future() + future.set_result(AcknowledgeStatus.SUCCESS) + assert future.result() == AcknowledgeStatus.SUCCESS + + def test_result_on_failure(self): + future = futures.Future() + future.set_exception( + AcknowledgeError( + AcknowledgeStatus.PERMISSION_DENIED, "Something bad happened." + ) + ) + with pytest.raises(AcknowledgeError) as e: + future.result() + assert e.value.error_code == AcknowledgeStatus.PERMISSION_DENIED + assert e.value.info == "Something bad happened." diff --git a/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index 8f5049691..857152ac3 100644 --- a/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -13,31 +13,64 @@ # limitations under the License. 
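The TestFuture cases just above describe the future returned for exactly-once acknowledgements: set_result resolves it to an AcknowledgeStatus, while set_exception surfaces an AcknowledgeError carrying error_code and info. In a subscriber callback, consuming that future looks roughly like this (assuming exactly-once delivery is enabled on the subscription; the timeout value is illustrative):

from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeError

def callback(message):
    ack_future = message.ack_with_response()
    try:
        # Resolves to AcknowledgeStatus.SUCCESS once the server accepts the ack.
        ack_future.result(timeout=60)
    except AcknowledgeError as e:
        print(f"Ack failed: {e.error_code} ({e.info})")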
import logging +import sys import threading from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest -def test_heartbeat_inactive(caplog): - caplog.set_level(logging.INFO) +def test_heartbeat_inactive_manager_active_rpc( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + manager.is_active = False + manager.heartbeat.return_value = True # because of active rpc + + heartbeater_ = heartbeater.Heartbeater(manager) + make_sleep_mark_event_as_done(heartbeater_) + + heartbeater_.heartbeat() + + assert "Sent heartbeat" in caplog.text + assert "exiting" in caplog.text + + +def test_heartbeat_inactive_manager_inactive_rpc( + caplog, + modify_google_logger_propagation, +): + caplog.set_level(logging.DEBUG) + manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False + manager.heartbeat.return_value = False # because of inactive rpc heartbeater_ = heartbeater.Heartbeater(manager) + make_sleep_mark_event_as_done(heartbeater_) heartbeater_.heartbeat() + assert "Sent heartbeat" not in caplog.text assert "exiting" in caplog.text -def test_heartbeat_stopped(caplog): - caplog.set_level(logging.INFO) +def test_heartbeat_stopped(caplog, modify_google_logger_propagation): + caplog.set_level(logging.DEBUG) manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -47,17 +80,18 @@ def test_heartbeat_stopped(caplog): heartbeater_.heartbeat() + assert "Sent heartbeat" not in caplog.text assert "exiting" in caplog.text -def make_sleep_mark_manager_as_inactive(heartbeater): - # Make sleep mark the manager as inactive so that heartbeat() +def make_sleep_mark_event_as_done(heartbeater): + # Make sleep actually trigger the done event so that heartbeat() # exits at the end of the first run. - def trigger_inactive(timeout): + def trigger_done(timeout): assert timeout - heartbeater._manager.is_active = False + heartbeater._stop_event.set() - heartbeater._stop_event.wait = trigger_inactive + heartbeater._stop_event.wait = trigger_done def test_heartbeat_once(): @@ -65,7 +99,7 @@ def test_heartbeat_once(): streaming_pull_manager.StreamingPullManager, instance=True ) heartbeater_ = heartbeater.Heartbeater(manager) - make_sleep_mark_manager_as_inactive(heartbeater_) + make_sleep_mark_event_as_done(heartbeater_) heartbeater_.heartbeat() diff --git a/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 6e1bcc813..bfbaf3e56 100644 --- a/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -12,8 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
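The heartbeater tests above fix the loop's shape: it runs until its stop event is set rather than until the manager goes inactive, it logs "Sent heartbeat" only when manager.heartbeat() reports an active RPC, and it logs "exiting" on the way out. A simplified sketch under those assumptions; the interval and logger are illustrative, not the library's internals:

import logging
import threading

_LOGGER = logging.getLogger(__name__)
_INTERVAL = 30  # illustrative heartbeat period

def heartbeat_loop(manager, stop_event: threading.Event) -> None:
    while not stop_event.is_set():
        if manager.heartbeat():  # True only while the underlying RPC is active
            _LOGGER.debug("Sent heartbeat.")
        stop_event.wait(timeout=_INTERVAL)
    _LOGGER.debug("Thread exiting.")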
-import mock -from six.moves import queue +import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + +import queue from google.cloud.pubsub_v1.subscriber._protocol import helper_threads diff --git a/tests/unit/pubsub_v1/subscriber/test_histogram.py b/tests/unit/pubsub_v1/subscriber/test_histogram.py index d3c15cdce..aacdc3050 100644 --- a/tests/unit/pubsub_v1/subscriber/test_histogram.py +++ b/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -33,7 +33,7 @@ def test_contains(): def test_max(): histo = histogram.Histogram() - assert histo.max == 600 + assert histo.max == histogram.MAX_ACK_DEADLINE histo.add(120) assert histo.max == 120 histo.add(150) @@ -44,7 +44,7 @@ def test_max(): def test_min(): histo = histogram.Histogram() - assert histo.min == 10 + assert histo.min == histogram.MIN_ACK_DEADLINE histo.add(60) assert histo.min == 60 histo.add(30) @@ -63,20 +63,23 @@ def test_add(): def test_add_lower_limit(): histo = histogram.Histogram() - histo.add(5) - assert 5 not in histo - assert 10 in histo + low_value = histogram.MIN_ACK_DEADLINE - 1 + histo.add(low_value) + assert low_value not in histo + assert histogram.MIN_ACK_DEADLINE in histo def test_add_upper_limit(): histo = histogram.Histogram() - histo.add(12000) - assert 12000 not in histo - assert 600 in histo + high_value = histogram.MAX_ACK_DEADLINE + 1 + histo.add(high_value) + assert high_value not in histo + assert histogram.MAX_ACK_DEADLINE in histo def test_percentile(): histo = histogram.Histogram() + assert histo.percentile(42) == histogram.MIN_ACK_DEADLINE # default when empty [histo.add(i) for i in range(101, 201)] assert histo.percentile(100) == 200 assert histo.percentile(101) == 200 diff --git a/tests/unit/pubsub_v1/subscriber/test_leaser.py b/tests/unit/pubsub_v1/subscriber/test_leaser.py index 17409cb3f..606dcc2c9 100644 --- a/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -13,6 +13,7 @@ # limitations under the License. 
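The histogram tests above now assert against the named bounds instead of the old magic numbers 10 and 600: values added outside [MIN_ACK_DEADLINE, MAX_ACK_DEADLINE] are clamped to the nearest bound, and an empty histogram reports MIN_ACK_DEADLINE for any percentile. A small usage sketch grounded in those assertions:

from google.cloud.pubsub_v1.subscriber._protocol import histogram

histo = histogram.Histogram()
assert histo.percentile(99) == histogram.MIN_ACK_DEADLINE  # default when empty

histo.add(histogram.MIN_ACK_DEADLINE - 1)  # clamped up to MIN_ACK_DEADLINE
histo.add(histogram.MAX_ACK_DEADLINE + 1)  # clamped down to MAX_ACK_DEADLINE
assert histo.min == histogram.MIN_ACK_DEADLINE
assert histo.max == histogram.MAX_ACK_DEADLINE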
 import logging
+import sys
 import threading
 
 from google.cloud.pubsub_v1 import types
@@ -21,8 +22,17 @@
 from google.cloud.pubsub_v1.subscriber._protocol import leaser
 from google.cloud.pubsub_v1.subscriber._protocol import requests
 from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager
+from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import (
+    SubscribeOpenTelemetry,
+)
+from google.cloud.pubsub_v1.subscriber import message
+
+# special case python < 3.8
+if sys.version_info.major == 3 and sys.version_info.minor < 8:
+    import mock
+else:
+    from unittest import mock
 
-import mock
 import pytest
 
 
@@ -43,7 +53,7 @@ def test_add_and_remove():
     assert leaser_.bytes == 25
 
 
-def test_add_already_managed(caplog):
+def test_add_already_managed(caplog, modify_google_logger_propagation):
     caplog.set_level(logging.DEBUG)
 
     leaser_ = leaser.Leaser(mock.sentinel.manager)
@@ -54,7 +64,7 @@ def test_add_already_managed(caplog):
     assert "already lease managed" in caplog.text
 
 
-def test_remove_not_managed(caplog):
+def test_remove_not_managed(caplog, modify_google_logger_propagation):
     caplog.set_level(logging.DEBUG)
 
     leaser_ = leaser.Leaser(mock.sentinel.manager)
@@ -64,7 +74,7 @@ def test_remove_not_managed(caplog):
     assert "not managed" in caplog.text
 
 
-def test_remove_negative_bytes(caplog):
+def test_remove_negative_bytes(caplog, modify_google_logger_propagation):
     caplog.set_level(logging.DEBUG)
 
     leaser_ = leaser.Leaser(mock.sentinel.manager)
@@ -84,63 +94,214 @@ def create_manager(flow_control=types.FlowControl()):
     manager.is_active = True
     manager.flow_control = flow_control
     manager.ack_histogram = histogram.Histogram()
-    manager.ack_deadline = 10
+    manager._obtain_ack_deadline.return_value = 10
     return manager
 
 
-def test_maintain_leases_inactive(caplog):
-    caplog.set_level(logging.INFO)
+def test_maintain_leases_inactive_manager(caplog, modify_google_logger_propagation):
+    caplog.set_level(logging.DEBUG)
     manager = create_manager()
     manager.is_active = False
 
     leaser_ = leaser.Leaser(manager)
+    make_sleep_mark_event_as_done(leaser_)
+    leaser_.add(
+        [requests.LeaseRequest(ack_id="my_ack_ID", byte_size=42, ordering_key="")]
+    )
+    manager._send_lease_modacks.return_value = set()
 
     leaser_.maintain_leases()
 
+    # Leases should still be maintained even if the manager is inactive.
+    manager._send_lease_modacks.assert_called()
     assert "exiting" in caplog.text
 
 
-def test_maintain_leases_stopped(caplog):
-    caplog.set_level(logging.INFO)
+def test_maintain_leases_stopped(caplog, modify_google_logger_propagation):
+    caplog.set_level(logging.DEBUG)
     manager = create_manager()
 
     leaser_ = leaser.Leaser(manager)
     leaser_.stop()
+    manager._send_lease_modacks.return_value = set()
 
     leaser_.maintain_leases()
 
     assert "exiting" in caplog.text
 
 
-def make_sleep_mark_manager_as_inactive(leaser):
-    # Make sleep mark the manager as inactive so that maintain_leases
+def make_sleep_mark_event_as_done(leaser):
+    # Make sleep actually trigger the done event so that maintain_leases()
+    # exits at the end of the first run.
- def trigger_inactive(timeout): + def trigger_done(timeout): assert 0 < timeout < 10 - leaser._manager.is_active = False + leaser._stop_event.set() + + leaser._stop_event.wait = trigger_done + + +def test_opentelemetry_dropped_message_process_span(span_exporter): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + msg = mock.create_autospec( + message.Message, instance=True, ack_id="ack_foo", size=10 + ) + msg.message_id = 3 + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + leaser_.add( + [ + requests.LeaseRequest( + ack_id="my ack id", + byte_size=50, + ordering_key="", + opentelemetry_data=opentelemetry_data, + ) + ] + ) + leased_messages_dict = leaser_._leased_messages - leaser._stop_event.wait = trigger_inactive + # Setting the `sent_time`` to be less than `cutoff` in order to make the leased message expire. + # This will exercise the code path where the message would be dropped from the leaser + leased_messages_dict["my ack id"] = leased_messages_dict["my ack id"]._replace( + sent_time=0 + ) + + manager._send_lease_modacks.return_value = set() + leaser_.maintain_leases() + + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert process_span.name == "subscriptionID process" + assert subscribe_span.name == "subscriptionID subscribe" + + assert len(process_span.events) == 1 + assert process_span.events[0].name == "expired" + + assert process_span.parent == subscribe_span.context + + +def test_opentelemetry_expired_message_exactly_once_process_span(span_exporter): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + msg = mock.create_autospec( + message.Message, instance=True, ack_id="ack_foo", size=10 + ) + msg.message_id = 3 + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + leaser_.add( + [ + requests.LeaseRequest( + ack_id="my ack id", + byte_size=50, + ordering_key="", + opentelemetry_data=opentelemetry_data, + ) + ] + ) + + manager._send_lease_modacks.return_value = ["my ack id"] + leaser_.maintain_leases() + + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert process_span.name == "subscriptionID process" + assert subscribe_span.name == "subscriptionID subscribe" + + assert len(process_span.events) == 1 + assert process_span.events[0].name == "expired" + + assert process_span.parent == subscribe_span.context def test_maintain_leases_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) leaser_.add( [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] ) + manager._send_lease_modacks.return_value = set() leaser_.maintain_leases() - manager.dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest(ack_id="my ack id", seconds=10)] + assert 
len(manager._send_lease_modacks.mock_calls) == 1 + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["my ack id"] + assert call.args[1] == 10 + + +def test_maintain_leases_expired_ack_ids_ignored(): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + leaser_.add( + [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] ) + manager._exactly_once_delivery_enabled.return_value = False + manager._send_lease_modacks.return_value = set(["my ack id"]) + leaser_.maintain_leases() + + assert len(manager._send_lease_modacks.mock_calls) == 1 + + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["my ack id"] + assert call.args[1] == 10 + + +def test_maintain_leases_expired_ack_ids_exactly_once(): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + leaser_.add( + [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] + ) + manager._exactly_once_delivery_enabled.return_value = True + manager._send_lease_modacks.return_value = set(["my ack id"]) + leaser_.maintain_leases() + + assert len(manager._send_lease_modacks.mock_calls) == 1 + + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["my ack id"] + assert call.args[1] == 10 + + assert len(manager.dispatcher.drop.mock_calls) == 1 + call = manager.dispatcher.drop.mock_calls[0] + drop_requests = list(call.args[0]) + assert drop_requests[0].ack_id == "my ack id" + assert drop_requests[0].byte_size == 50 + assert drop_requests[0].ordering_key == "" def test_maintain_leases_no_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) leaser_.maintain_leases() @@ -151,7 +312,7 @@ def test_maintain_leases_no_ack_ids(): def test_maintain_leases_outdated_items(time): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) # Add and start expiry timer at the beginning of the timeline. time.return_value = 0 @@ -173,17 +334,15 @@ def test_maintain_leases_outdated_items(time): # Now make sure time reports that we are past the end of our timeline. time.return_value = manager.flow_control.max_lease_duration + 1 + manager._send_lease_modacks.return_value = set() leaser_.maintain_leases() # ack2, ack3, and ack4 should be renewed. ack1 should've been dropped - modacks = manager.dispatcher.modify_ack_deadline.call_args.args[0] - expected = [ - requests.ModAckRequest(ack_id="ack2", seconds=10), - requests.ModAckRequest(ack_id="ack3", seconds=10), - requests.ModAckRequest(ack_id="ack4", seconds=10), - ] - # Use sorting to allow for ordering variance. 
- assert sorted(modacks) == sorted(expected) + assert len(manager._send_lease_modacks.mock_calls) == 1 + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["ack2", "ack3", "ack4"] + assert call.args[1] == 10 manager.dispatcher.drop.assert_called_once_with( [requests.DropRequest(ack_id="ack1", byte_size=50, ordering_key="")] diff --git a/tests/unit/pubsub_v1/subscriber/test_message.py b/tests/unit/pubsub_v1/subscriber/test_message.py index 0c8a6d181..03bdc1514 100644 --- a/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/tests/unit/pubsub_v1/subscriber/test_message.py @@ -13,42 +13,61 @@ # limitations under the License. import datetime +import queue +import sys import time -import mock -import pytz -from six.moves import queue -from google.protobuf import timestamp_pb2 +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock from google.api_core import datetime_helpers -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.protobuf import timestamp_pb2 +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) -RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc) +RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 PUBLISHED_MICROS = 123456 PUBLISHED = RECEIVED + datetime.timedelta(days=1, microseconds=PUBLISHED_MICROS) PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 -def create_message(data, ack_id="ACKID", delivery_attempt=0, ordering_key="", **attrs): +def create_message( + data, + ack_id="ACKID", + delivery_attempt=0, + ordering_key="", + exactly_once_delivery_enabled=False, + **attrs +): with mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS - msg = message.Message( - message=types.PubsubMessage( - attributes=attrs, - data=data, - message_id="message_id", - publish_time=timestamp_pb2.Timestamp( - seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 - ), - ordering_key=ordering_key, + gapic_pubsub_message = gapic_types.PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 ), + ordering_key=ordering_key, + ) + msg = message.Message( + # The code under test uses a raw protobuf PubsubMessage, i.e. 
w/o additional + # Python class wrappers, hence the "_pb" + message=gapic_pubsub_message._pb, ack_id=ack_id, delivery_attempt=delivery_attempt, request_queue=queue.Queue(), + exactly_once_delivery_enabled_func=lambda: exactly_once_delivery_enabled, ) return msg @@ -113,9 +132,155 @@ def check_call_types(mock, *args, **kwargs): assert len(call_args) == len(args) for n, argtype in enumerate(args): assert isinstance(call_args[n], argtype) - for argname, argtype in kwargs: - assert argname in call_kwargs - assert isinstance(call_kwargs[argname], argtype) + + +def test_opentelemetry_ack(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + msg.opentelemetry_data = opentelemetry_data + msg.ack() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "ack called" + + +def test_opentelemetry_ack_with_response(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + msg.opentelemetry_data = opentelemetry_data + msg.ack_with_response() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "ack called" + + +def test_opentelemetry_nack(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + msg.opentelemetry_data = opentelemetry_data + msg.nack() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "nack called" + + +def test_opentelemetry_nack_with_response(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + 
delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + msg.opentelemetry_data = opentelemetry_data + msg.nack_with_response() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "nack called" + + +def test_opentelemetry_modack(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + msg.opentelemetry_data = opentelemetry_data + msg.modify_ack_deadline(3) + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + + assert len(spans[0].events) == 0 + + +def test_opentelemetry_modack_with_response(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + msg.opentelemetry_data = opentelemetry_data + msg.modify_ack_deadline_with_response(3) + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + + assert len(spans[0].events) == 0 def test_ack(): @@ -124,10 +289,50 @@ def test_ack(): msg.ack() put.assert_called_once_with( requests.AckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + byte_size=30, + time_to_ack=mock.ANY, + ordering_key="", + future=None, + ) + ) + check_call_types(put, requests.AckRequest) + + +def test_ack_with_response_exactly_once_delivery_disabled(): + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.ack_with_response() + put.assert_called_once_with( + requests.AckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + byte_size=30, + time_to_ack=mock.ANY, + ordering_key="", + future=None, + ) + ) + assert future.result() == AcknowledgeStatus.SUCCESS + assert future == message._SUCCESS_FUTURE + check_call_types(put, requests.AckRequest) + + +def test_ack_with_response_exactly_once_delivery_enabled(): + msg = create_message( + b"foo", ack_id="bogus_ack_id", exactly_once_delivery_enabled=True + ) + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.ack_with_response() + put.assert_called_once_with( + requests.AckRequest( + message_id=msg.message_id, ack_id="bogus_ack_id", byte_size=30, time_to_ack=mock.ANY, ordering_key="", + future=future, ) ) check_call_types(put, requests.AckRequest) @@ -148,7 +353,46 @@ def test_modify_ack_deadline(): with mock.patch.object(msg._request_queue, "put") as put: msg.modify_ack_deadline(60) put.assert_called_once_with( - requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60) + requests.ModAckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + seconds=60, + future=None, + ) + ) + check_call_types(put, requests.ModAckRequest) + + +def test_modify_ack_deadline_with_response_exactly_once_delivery_disabled(): + 
msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.modify_ack_deadline_with_response(60) + put.assert_called_once_with( + requests.ModAckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + seconds=60, + future=None, + ) + ) + assert future.result() == AcknowledgeStatus.SUCCESS + assert future == message._SUCCESS_FUTURE + check_call_types(put, requests.ModAckRequest) + + +def test_modify_ack_deadline_with_response_exactly_once_delivery_enabled(): + msg = create_message( + b"foo", ack_id="bogus_ack_id", exactly_once_delivery_enabled=True + ) + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.modify_ack_deadline_with_response(60) + put.assert_called_once_with( + requests.ModAckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + seconds=60, + future=future, + ) ) check_call_types(put, requests.ModAckRequest) @@ -158,7 +402,37 @@ def test_nack(): with mock.patch.object(msg._request_queue, "put") as put: msg.nack() put.assert_called_once_with( - requests.NackRequest(ack_id="bogus_ack_id", byte_size=30, ordering_key="") + requests.NackRequest( + ack_id="bogus_ack_id", byte_size=30, ordering_key="", future=None + ) + ) + check_call_types(put, requests.NackRequest) + + +def test_nack_with_response_exactly_once_delivery_disabled(): + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.nack_with_response() + put.assert_called_once_with( + requests.NackRequest( + ack_id="bogus_ack_id", byte_size=30, ordering_key="", future=None + ) + ) + assert future.result() == AcknowledgeStatus.SUCCESS + assert future == message._SUCCESS_FUTURE + check_call_types(put, requests.NackRequest) + + +def test_nack_with_response_exactly_once_delivery_enabled(): + msg = create_message( + b"foo", ack_id="bogus_ack_id", exactly_once_delivery_enabled=True + ) + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.nack_with_response() + put.assert_called_once_with( + requests.NackRequest( + ack_id="bogus_ack_id", byte_size=30, ordering_key="", future=future + ) ) check_call_types(put, requests.NackRequest) diff --git a/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 946e2598a..0f060e4ea 100644 --- a/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -12,16 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from six.moves import queue +import queue -from google.cloud.pubsub_v1 import types +from opentelemetry import trace + +from google.pubsub_v1 import types as gapic_types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) def make_message(ack_id, ordering_key): - proto_msg = types.PubsubMessage(data=b"Q", ordering_key=ordering_key) - return message.Message(proto_msg, ack_id, 0, queue.Queue()) + proto_msg = gapic_types.PubsubMessage(data=b"Q", ordering_key=ordering_key) + return message.Message( + proto_msg._pb, + ack_id, + 0, + queue.Queue(), + exactly_once_delivery_enabled_func=lambda: False, # pragma: NO COVER + ) def test_init(): @@ -31,6 +42,32 @@ def test_init(): assert moh.get() is None +def test_opentelemetry_subscriber_scheduler_span(span_exporter): + moh = messages_on_hold.MessagesOnHold() + msg = make_message(ack_id="ack1", ordering_key="") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + moh.put(msg) + opentelemetry_data.end_subscribe_scheduler_span() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 2 + + subscribe_scheduler_span, subscribe_span = spans + + assert subscribe_scheduler_span.name == "subscriber scheduler" + assert subscribe_scheduler_span.kind == trace.SpanKind.INTERNAL + assert subscribe_scheduler_span.parent == subscribe_span.context + + def test_put_and_get_unordered_messages(): moh = messages_on_hold.MessagesOnHold() @@ -103,6 +140,72 @@ def test_ordered_messages_one_key(): assert moh.size == 0 +def test_ordered_messages_drop_duplicate_keys(caplog, modify_google_logger_propagation): + moh = messages_on_hold.MessagesOnHold() + + msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + msg2 = make_message(ack_id="ack2", ordering_key="key1") + moh.put(msg2) + assert moh.size == 2 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 1 + + # Still waiting on the previously-sent message for "key1", and there are no + # other messages, so return None. + assert moh.get() is None + assert moh.size == 1 + + # Activate "key1". + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1", "key1"], callback_tracker) + assert callback_tracker.called + assert callback_tracker.message == msg2 + assert moh.size == 0 + assert len(moh._pending_ordered_messages) == 0 + + # Activate "key1" again + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Activate "key1" again. There are no other messages for that key, so clean + # up state for that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + msg3 = make_message(ack_id="ack3", ordering_key="key1") + moh.put(msg3) + assert moh.size == 1 + + # Get next message for "key1" + assert moh.get() == msg3 + assert moh.size == 0 + + # Activate "key1". 
+ callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Activate "key1" again. There are no other messages for that key, so clean + # up state for that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Activate "key1" again after being cleaned up. There are no other messages for that key, so clean + # up state for that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + assert "No message queue exists for message ordering key: key1" in caplog.text + + def test_ordered_messages_two_keys(): moh = messages_on_hold.MessagesOnHold() @@ -272,3 +375,39 @@ def test_ordered_and_unordered_messages_interleaved(): # No messages left. assert moh.get() is None assert moh.size == 0 + + +def test_cleanup_nonexistent_key(caplog, modify_google_logger_propagation): + moh = messages_on_hold.MessagesOnHold() + moh._clean_up_ordering_key("non-existent-key") + assert ( + "Tried to clean up ordering key that does not exist: non-existent-key" + in caplog.text + ) + + +def test_cleanup_key_with_messages(caplog, modify_google_logger_propagation): + moh = messages_on_hold.MessagesOnHold() + + # Put message with "key1". + msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + # Put another message for "key1". + msg2 = make_message(ack_id="ack2", ordering_key="key1") + moh.put(msg2) + assert moh.size == 2 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 1 + + # A second get returns None: the first message for "key1" is still in flight. + assert moh.get() is None + assert moh.size == 1 + + moh._clean_up_ordering_key("key1") + assert ( + "Tried to clean up ordering key: key1 with 1 messages remaining." in caplog.text + ) diff --git a/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 774d0d63e..3ed1978c1 100644 --- a/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -13,10 +13,17 @@ # limitations under the License. 
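
To make the ordering-key assertions above easier to follow, here is a toy model of the per-key hold queues. It is an illustrative sketch of the behavior the tests pin down, not `MessagesOnHold` itself, and the names are invented:

import collections
import logging
from typing import Any, Callable

_LOGGER = logging.getLogger(__name__)


class OrderedHoldSketch:
    def __init__(self) -> None:
        # One FIFO of held messages per ordering key.
        self._pending: dict = {}

    def put(self, key: str, msg: Any) -> None:
        self._pending.setdefault(key, collections.deque()).append(msg)

    def activate(self, key: str, schedule: Callable[[Any], None]) -> None:
        queue_ = self._pending.get(key)
        if queue_ is None:
            # Matches the warning asserted in the tests above.
            _LOGGER.warning(
                "No message queue exists for message ordering key: %s", key
            )
        elif queue_:
            # The previous message for this key finished; release the next one.
            schedule(queue_.popleft())
        else:
            # Nothing left for this key, so clean up its state; a later
            # activate will then hit the warning branch instead.
            del self._pending[key]

This mirrors the three activation outcomes the tests walk through: deliver the next held message, silently clean up an emptied key, and warn when a key has already been cleaned up.
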
import concurrent.futures +import queue +import pytest +import sys import threading +import time -import mock -from six.moves import queue +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock from google.cloud.pubsub_v1.subscriber import scheduler @@ -38,19 +45,136 @@ def test_constructor_options(): assert scheduler_._executor == mock.sentinel.executor -def test_schedule(): +def test_schedule_executes_submitted_items(): called_with = [] - called = threading.Event() + callback_done_twice = threading.Barrier(3) # 3 == 2x callback + 1x main thread def callback(*args, **kwargs): - called_with.append((args, kwargs)) - called.set() + called_with.append((args, kwargs)) # appends are thread-safe + callback_done_twice.wait() scheduler_ = scheduler.ThreadScheduler() scheduler_.schedule(callback, "arg1", kwarg1="meep") + scheduler_.schedule(callback, "arg2", kwarg2="boop") - called.wait() - scheduler_.shutdown() + callback_done_twice.wait(timeout=3.0) + result = scheduler_.shutdown() - assert called_with == [(("arg1",), {"kwarg1": "meep"})] + assert result == [] # no scheduled items dropped + + expected_calls = [(("arg1",), {"kwarg1": "meep"}), (("arg2",), {"kwarg2": "boop"})] + assert sorted(called_with) == expected_calls + + +def test_schedule_after_executor_shutdown_warning(): + def callback(*args, **kwargs): + pass + + scheduler_ = scheduler.ThreadScheduler() + + scheduler_.schedule(callback, "arg1", kwarg1="meep") + scheduler_._executor.shutdown() + + with pytest.warns( + RuntimeWarning, match="Scheduling a callback after executor shutdown" + ) as warned: + scheduler_.schedule(callback, "arg2", kwarg2="boop") + + assert len(warned) == 1 + assert issubclass(warned[0].category, RuntimeWarning) + warning_msg = str(warned[0].message) + assert "after executor shutdown" in warning_msg + + +def test_shutdown_nonblocking_by_default(): + called_with = [] + at_least_one_called = threading.Event() + at_least_one_completed = threading.Event() + + def callback(message): + called_with.append(message) # appends are thread-safe + at_least_one_called.set() + time.sleep(1.0) + at_least_one_completed.set() + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + scheduler_.schedule(callback, "message_2") + + at_least_one_called.wait() + dropped = scheduler_.shutdown() + + assert len(called_with) == 1 + assert called_with[0] in {"message_1", "message_2"} + + assert len(dropped) == 1 + assert dropped[0] in {"message_1", "message_2"} + assert dropped[0] != called_with[0] # the dropped message was not the processed one + + err_msg = ( + "Shutdown should not have waited " + "for the already running callbacks to complete." 
+ ) + assert not at_least_one_completed.is_set(), err_msg + + +def test_shutdown_blocking_awaits_running_callbacks(): + called_with = [] + at_least_one_called = threading.Event() + at_least_one_completed = threading.Event() + + def callback(message): + called_with.append(message) # appends are thread-safe + at_least_one_called.set() + time.sleep(1.0) + at_least_one_completed.set() + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + scheduler_.schedule(callback, "message_2") + + at_least_one_called.wait() + dropped = scheduler_.shutdown(await_msg_callbacks=True) + + assert len(called_with) == 1 + assert called_with[0] in {"message_1", "message_2"} + + # The work items that have not been started yet should still be dropped. + assert len(dropped) == 1 + assert dropped[0] in {"message_1", "message_2"} + assert dropped[0] != called_with[0] # the dropped message was not the processed one + + err_msg = "Shutdown did not wait for the already running callbacks to complete." + assert at_least_one_completed.is_set(), err_msg + + +def test_shutdown_handles_executor_queue_sentinels(): + at_least_one_called = threading.Event() + + def callback(_): + at_least_one_called.set() + time.sleep(1.0) + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + scheduler_.schedule(callback, "message_2") + scheduler_.schedule(callback, "message_3") + + # Simulate executor shutdown from another thread. + executor._work_queue.put(None) + executor._work_queue.put(None) + + at_least_one_called.wait() + dropped = scheduler_.shutdown(await_msg_callbacks=True) + + assert len(set(dropped)) == 2 # Also test for item uniqueness. + for msg in dropped: + assert msg is not None + assert msg.startswith("message_") diff --git a/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 3f2881df6..b9561d747 100644 --- a/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -13,17 +13,35 @@ # limitations under the License. 
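
The scheduler shutdown tests above all exercise the same mechanism: shutdown drains work items that were queued but never started and returns their messages, skipping the None sentinels that `ThreadPoolExecutor` shutdown pushes onto its queue to wake worker threads. A rough sketch of that drain loop follows; it reaches into the executor's private `_work_queue` exactly as the sentinel test does, and it assumes callbacks were scheduled as `schedule(callback, message)` — assumptions, not the shipped implementation:

import concurrent.futures
import queue


def drain_and_shutdown(
    executor: concurrent.futures.ThreadPoolExecutor,
    await_msg_callbacks: bool = False,
):
    dropped = []
    while True:
        try:
            work_item = executor._work_queue.get_nowait()
        except queue.Empty:
            break
        if work_item is None:
            # Executor-shutdown sentinel; it carries no message.
            continue
        # The message is assumed to be the callback's first positional argument.
        dropped.append(work_item.args[0])
    # wait=True corresponds to shutdown(await_msg_callbacks=True): block until
    # callbacks that have already started finish running.
    executor.shutdown(wait=await_msg_callbacks)
    return dropped

The non-blocking default lets a subscriber cancel quickly, returning before in-flight callbacks complete; passing await_msg_callbacks=True trades shutdown latency for a clean finish.
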
import logging +import sys import threading import time import types as stdlib_types +import datetime +import queue +import math -import mock +from opentelemetry import trace +from google.protobuf import timestamp_pb2 +from google.api_core import datetime_helpers + +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) +from google.cloud.pubsub_v1.subscriber.message import Message +from google.cloud.pubsub_v1.types import PubsubMessage + + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock import pytest from google.api_core import bidi from google.api_core import exceptions from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import subscriber_client_config from google.cloud.pubsub_v1.subscriber import client from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber import scheduler @@ -33,7 +51,22 @@ from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.cloud.pubsub_v1.subscriber import exceptions as subscriber_exceptions +from google.cloud.pubsub_v1.subscriber import futures +from google.pubsub_v1 import types as gapic_types import grpc +from google.rpc import status_pb2 +from google.rpc import code_pb2 +from google.rpc import error_details_pb2 + + +def create_mock_message(**kwargs): + _message_mock = mock.create_autospec(message.Message, instance=True) + msg = _message_mock.return_value + for k, v in kwargs.items(): + setattr(msg, k, v) + + return msg @pytest.mark.parametrize( @@ -44,16 +77,18 @@ mock.create_autospec(grpc.RpcError, instance=True), exceptions.GoogleAPICallError, ), + ({"error": "RPC terminated"}, Exception), + ("something broke", Exception), ], ) -def test__maybe_wrap_exception(exception, expected_cls): +def test__wrap_as_exception(exception, expected_cls): assert isinstance( - streaming_pull_manager._maybe_wrap_exception(exception), expected_cls + streaming_pull_manager._wrap_as_exception(exception), expected_cls ) def test__wrap_callback_errors_no_error(): - msg = mock.create_autospec(message.Message, instance=True) + msg = create_mock_message() callback = mock.Mock() on_callback_error = mock.Mock() @@ -64,10 +99,15 @@ def test__wrap_callback_errors_no_error(): on_callback_error.assert_not_called() -def test__wrap_callback_errors_error(): - callback_error = ValueError("meep") - - msg = mock.create_autospec(message.Message, instance=True) +@pytest.mark.parametrize( + "callback_error", + [ + (ValueError("ValueError")), + (BaseException("BaseException")), + ], +) +def test__wrap_callback_errors_error(callback_error): + msg = create_mock_message() callback = mock.Mock(side_effect=callback_error) on_callback_error = mock.Mock() @@ -78,6 +118,7 @@ def test__wrap_callback_errors_error(): def test_constructor_and_default_state(): + mock.sentinel.subscription = str() manager = streaming_pull_manager.StreamingPullManager( mock.sentinel.client, mock.sentinel.subscription ) @@ -99,26 +140,90 @@ def test_constructor_and_default_state(): assert manager._client_id is not None -def test_constructor_with_options(): +def test_constructor_with_default_options(): + mock.sentinel.subscription = str() + flow_control_ = types.FlowControl() manager = streaming_pull_manager.StreamingPullManager( mock.sentinel.client, 
mock.sentinel.subscription, - flow_control=mock.sentinel.flow_control, + flow_control=flow_control_, scheduler=mock.sentinel.scheduler, ) - assert manager.flow_control == mock.sentinel.flow_control + assert manager.flow_control == flow_control_ assert manager._scheduler == mock.sentinel.scheduler + assert manager._ack_deadline == 10 + assert manager._stream_ack_deadline == 60 + + +def test_constructor_with_min_and_max_duration_per_lease_extension_(): + mock.sentinel.subscription = str() + flow_control_ = types.FlowControl( + min_duration_per_lease_extension=15, max_duration_per_lease_extension=20 + ) + manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription, + flow_control=flow_control_, + scheduler=mock.sentinel.scheduler, + ) + assert manager._ack_deadline == 15 + assert manager._stream_ack_deadline == 20 + + +def test_constructor_with_min_duration_per_lease_extension_too_low(): + mock.sentinel.subscription = str() + flow_control_ = types.FlowControl( + min_duration_per_lease_extension=9, max_duration_per_lease_extension=9 + ) + manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription, + flow_control=flow_control_, + scheduler=mock.sentinel.scheduler, + ) + assert manager._ack_deadline == 10 + assert manager._stream_ack_deadline == 10 + + +def test_constructor_with_max_duration_per_lease_extension_too_high(): + mock.sentinel.subscription = str() + flow_control_ = types.FlowControl( + max_duration_per_lease_extension=601, min_duration_per_lease_extension=601 + ) + manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription, + flow_control=flow_control_, + scheduler=mock.sentinel.scheduler, + ) + assert manager._ack_deadline == 600 + assert manager._stream_ack_deadline == 600 -def make_manager(**kwargs): +def make_manager( + enable_open_telemetry: bool = False, + subscription_name: str = "subscription-name", + **kwargs, +): client_ = mock.create_autospec(client.Client, instance=True) + client_.open_telemetry_enabled = enable_open_telemetry scheduler_ = mock.create_autospec(scheduler.Scheduler, instance=True) return streaming_pull_manager.StreamingPullManager( - client_, "subscription-name", scheduler=scheduler_, **kwargs + client_, subscription_name, scheduler=scheduler_, **kwargs ) +def complete_modify_ack_deadline_calls(dispatcher): + def complete_futures(*args, **kwargs): + modack_requests = args[0] + for req in modack_requests: + if req.future: + req.future.set_result(subscriber_exceptions.AcknowledgeStatus.SUCCESS) + + dispatcher.modify_ack_deadline.side_effect = complete_futures + + def fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10): """Add a simplified fake add() method to a leaser instance. @@ -136,23 +241,161 @@ def fake_add(self, items): leaser.add = stdlib_types.MethodType(fake_add, leaser) -def test_ack_deadline(): +def test__obtain_ack_deadline_no_custom_flow_control_setting(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + manager = make_manager() - assert manager.ack_deadline == 10 - manager.ack_histogram.add(20) - assert manager.ack_deadline == 20 - manager.ack_histogram.add(10) - assert manager.ack_deadline == 20 + + # Make sure that min_duration_per_lease_extension and + # max_duration_per_lease_extension is disabled. 
+ manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=0, max_duration_per_lease_extension=0 + ) + assert manager._stream_ack_deadline == 60 + assert manager._ack_deadline == 10 + assert manager._obtain_ack_deadline(maybe_update=False) == 10 + + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE + assert manager._stream_ack_deadline == 60 + + # When we get some historical data, the deadline is adjusted. + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 2) + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE * 2 + + # Adding just a single additional data point does not yet change the deadline. + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE) + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE * 2 + + +def test__obtain_ack_deadline_with_max_duration_per_lease_extension(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + max_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE + 1 + ) + assert manager._ack_deadline == 10 + + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 3) # make p99 value large + + # The deadline configured in flow control should prevail. + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE + 1 + assert manager._stream_ack_deadline == 60 + + +def test__obtain_ack_deadline_with_min_duration_per_lease_extension(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MAX_ACK_DEADLINE + ) + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE) # make p99 value small + + # The deadline configured in flow control should prevail. + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MAX_ACK_DEADLINE + assert manager._stream_ack_deadline == histogram.MAX_ACK_DEADLINE + + +def test__obtain_ack_deadline_with_max_duration_per_lease_extension_too_low(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + max_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE - 1 + ) + + # The deadline configured in flow control should be adjusted to the minimum allowed. + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE + + +def test__obtain_ack_deadline_with_min_duration_per_lease_extension_too_high(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MAX_ACK_DEADLINE + 1 + ) + + # The deadline configured in flow control should be adjusted to the maximum allowed. 
+ deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MAX_ACK_DEADLINE + assert manager._stream_ack_deadline == histogram.MAX_ACK_DEADLINE + + +def test__obtain_ack_deadline_with_exactly_once_enabled(): + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=0 # leave as default value + ) + manager._exactly_once_enabled = True + manager.ack_histogram.add( + 10 + ) # reduce p99 value below 60s min for exactly_once subscriptions + + deadline = manager._obtain_ack_deadline(maybe_update=True) + # Since the 60-second min ack_deadline value for exactly_once subscriptions + # seconds is higher than the histogram value, the deadline should be 60 sec. + assert deadline == 60 + assert manager._stream_ack_deadline == 60 + + +def test__obtain_ack_deadline_with_min_duration_per_lease_extension_with_exactly_once_enabled(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MAX_ACK_DEADLINE + ) + manager._exactly_once_enabled = True + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE) # make p99 value small + + # The deadline configured in flow control should prevail. + deadline = manager._obtain_ack_deadline(maybe_update=True) + # User-defined custom min ack_deadline value takes precedence over + # exactly_once default of 60 seconds. + assert deadline == histogram.MAX_ACK_DEADLINE + assert manager._stream_ack_deadline == histogram.MAX_ACK_DEADLINE + + +def test__obtain_ack_deadline_no_value_update(): + manager = make_manager() + + # Make sure that max_duration_per_lease_extension is disabled. + manager._flow_control = types.FlowControl(max_duration_per_lease_extension=0) + + manager.ack_histogram.add(21) + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == 21 + + for _ in range(5): + manager.ack_histogram.add(35) # Gather some new ACK data. + + deadline = manager._obtain_ack_deadline(maybe_update=False) + assert deadline == 21 # still the same + + # Accessing the value through the ack_deadline property has no side effects either. + assert manager.ack_deadline == 21 + + # Updating the ack deadline is reflected on ack_deadline wrapper, too. 
+ deadline = manager._obtain_ack_deadline(maybe_update=True) + assert manager.ack_deadline == deadline == 35 def test_client_id(): manager1 = make_manager() - request1 = manager1._get_initial_request(stream_ack_deadline_seconds=10) + request1 = manager1._get_initial_request(stream_ack_deadline_seconds=60) client_id_1 = request1.client_id assert client_id_1 manager2 = make_manager() - request2 = manager2._get_initial_request(stream_ack_deadline_seconds=10) + request2 = manager2._get_initial_request(stream_ack_deadline_seconds=60) client_id_2 = request2.client_id assert client_id_2 @@ -163,20 +406,19 @@ def test_streaming_flow_control(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) - request = manager._get_initial_request(stream_ack_deadline_seconds=10) + request = manager._get_initial_request(stream_ack_deadline_seconds=60) assert request.max_outstanding_messages == 10 assert request.max_outstanding_bytes == 1000 -def test_ack_deadline_with_max_duration_per_lease_extension(): - manager = make_manager() - manager._flow_control = types.FlowControl(max_duration_per_lease_extension=5) - - assert manager.ack_deadline == 5 - for _ in range(5): - manager.ack_histogram.add(20) - - assert manager.ack_deadline == 5 +def test_streaming_flow_control_use_legacy_flow_control(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000), + use_legacy_flow_control=True, + ) + request = manager._get_initial_request(stream_ack_deadline_seconds=60) + assert request.max_outstanding_messages == 0 + assert request.max_outstanding_bytes == 0 def test_maybe_pause_consumer_wo_consumer_set(): @@ -277,8 +519,8 @@ def test__maybe_release_messages_on_overload(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) + msg = create_mock_message(ack_id="ack", size=11) - msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=11) manager._messages_on_hold.put(msg) manager._on_hold_bytes = msg.size @@ -294,6 +536,44 @@ def test__maybe_release_messages_on_overload(): manager._scheduler.schedule.assert_not_called() +def test_opentelemetry__maybe_release_messages_subscribe_scheduler_span(span_exporter): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) + manager._callback = mock.sentinel.callback + + # Init the leaser message count to 8, so that there is still room for + # another 2 messages before the max load of 10 is hit. 
+ _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) + fake_leaser_add(_leaser, init_msg_count=8, assumed_msg_size=10) + msg = create_mock_message(ack_id="ack_foo", size=10) + + msg.message_id = 3 + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + manager._messages_on_hold.put(msg) + manager._maybe_release_messages() + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + + assert len(spans) == 2 + + subscriber_scheduler_span, subscribe_span = spans + + assert subscriber_scheduler_span.name == "subscriber scheduler" + assert subscribe_span.name == "subscriptionID subscribe" + + assert subscriber_scheduler_span.parent == subscribe_span.context + assert subscriber_scheduler_span.kind == trace.SpanKind.INTERNAL + + def test__maybe_release_messages_below_overload(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) @@ -330,12 +610,15 @@ def test__maybe_release_messages_below_overload(): assert call_args[1].ack_id in ("ack_foo", "ack_bar") -def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): +def test__maybe_release_messages_negative_on_hold_bytes_warning( + caplog, modify_google_logger_propagation +): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) + manager._callback = lambda msg: msg # pragma: NO COVER - msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=17) + msg = create_mock_message(ack_id="ack", size=17) manager._messages_on_hold.put(msg) manager._on_hold_bytes = 5 # too low for some reason @@ -358,21 +641,240 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): assert manager._on_hold_bytes == 0 # should be auto-corrected -def test_send_unary(): - manager = make_manager() - manager._UNARY_REQUESTS = True +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +@pytest.mark.parametrize( + "receipt_modack", + [ + True, + False, + ], +) +def test_opentelemetry__send_lease_modacks(span_exporter, receipt_modack): + manager, _, _, _, _, _ = make_running_manager( + enable_open_telemetry=True, + subscription_name="projects/projectID/subscriptions/subscriptionID", + ) + data1 = SubscribeOpenTelemetry( + message=gapic_types.PubsubMessage(data=b"foo", message_id="1") + ) + data2 = SubscribeOpenTelemetry( + message=gapic_types.PubsubMessage(data=b"bar", message_id="2") + ) - manager.send( - types.StreamingPullRequest( + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id1", + delivery_attempt=2, + ) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id1", + delivery_attempt=2, + ) + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch.object( + data1._subscribe_span, "get_span_context", return_value=mock_span_context + ): + manager._send_lease_modacks( ack_ids=["ack_id1", "ack_id2"], - modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], - modify_deadline_seconds=[10, 20, 20], + ack_deadline=20, + opentelemetry_data=[data1, data2], + receipt_modack=receipt_modack, ) + 
data1.end_subscribe_span() + data2.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 3 + modack_span, subscribe_span1, subscribe_span2 = spans + + assert len(subscribe_span1.events) == 0 + assert len(subscribe_span2.events) == 0 + + assert len(subscribe_span1.links) == 0 + assert len(subscribe_span2.links) == 1 + assert subscribe_span2.links[0].context == modack_span.context + assert subscribe_span2.links[0].attributes["messaging.operation.name"] == "modack" + + assert modack_span.name == "subscriptionID modack" + assert modack_span.parent is None + assert modack_span.kind == trace.SpanKind.CLIENT + assert len(modack_span.links) == 1 + modack_span_attributes = modack_span.attributes + assert modack_span_attributes["messaging.system"] == "gcp_pubsub" + assert modack_span_attributes["messaging.batch.message_count"] == 2 + assert math.isclose( + modack_span_attributes["messaging.gcp_pubsub.message.ack_deadline"], 20 ) + assert modack_span_attributes["messaging.destination.name"] == "subscriptionID" + assert modack_span_attributes["gcp.project_id"] == "projectID" + assert modack_span_attributes["messaging.operation.name"] == "modack" + assert modack_span_attributes["code.function"] == "_send_lease_modacks" + assert ( + modack_span_attributes["messaging.gcp_pubsub.is_receipt_modack"] + == receipt_modack + ) + + +def test_send_unary_ack(): + manager = make_manager() + + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) + + manager._client.acknowledge.assert_called_once_with( + subscription=manager._subscription, ack_ids=["ack_id1", "ack_id2"] + ) + + +def test_send_unary_ack_exactly_once_enabled_with_futures(): + manager = make_manager() + manager._exactly_once_enabled = True + + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) manager._client.acknowledge.assert_called_once_with( subscription=manager._subscription, ack_ids=["ack_id1", "ack_id2"] ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_send_unary_ack_exactly_once_disabled_with_futures(): + manager = make_manager() + + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) + + manager._client.acknowledge.assert_called_once_with( + subscription=manager._subscription, ack_ids=["ack_id1", "ack_id2"] + ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def 
test_send_unary_modack(): + manager = make_manager() + + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=None), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=None), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=None), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=[10, 20, 20], + ack_reqs_dict=ack_reqs_dict, + ) + + manager._client.modify_ack_deadline.assert_has_calls( + [ + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id3"], + ack_deadline_seconds=10, + ), + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id4", "ack_id5"], + ack_deadline_seconds=20, + ), + ], + any_order=True, + ) + + +def test_send_unary_modack_default_deadline(): + manager = make_manager() + + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=None), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=None), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=None), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=None, + ack_reqs_dict=ack_reqs_dict, + default_deadline=10, + ) + + manager._client.modify_ack_deadline.assert_has_calls( + [ + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id3", "ack_id4", "ack_id5"], + ack_deadline_seconds=10, + ), + ], + any_order=True, + ) + + +def test_send_unary_modack_exactly_once_enabled_with_futures(): + manager = make_manager() + manager._exactly_once_enabled = True + + future1 = futures.Future() + future2 = futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=future1), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=future2), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=future3), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=[10, 20, 20], + ack_reqs_dict=ack_reqs_dict, + ) manager._client.modify_ack_deadline.assert_has_calls( [ @@ -389,78 +891,424 @@ def test_send_unary(): ], any_order=True, ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS -def test_send_unary_empty(): +def test_send_unary_modack_exactly_once_disabled_with_futures(): manager = make_manager() - manager._UNARY_REQUESTS = True - manager.send(types.StreamingPullRequest()) + future1 = futures.Future() + future2 = futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=future1), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=future2), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=future3), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=[10, 20, 20], + ack_reqs_dict=ack_reqs_dict, + ) - manager._client.acknowledge.assert_not_called() - manager._client.modify_ack_deadline.assert_not_called() + manager._client.modify_ack_deadline.assert_has_calls( + [ + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id3"], + ack_deadline_seconds=10, + ), + 
mock.call( + subscription=manager._subscription, + ack_ids=["ack_id4", "ack_id5"], + ack_deadline_seconds=20, + ), + ], + any_order=True, + ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS -def test_send_unary_api_call_error(caplog): +def test_send_unary_ack_api_call_error(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager = make_manager() - manager._UNARY_REQUESTS = True error = exceptions.GoogleAPICallError("The front fell off") manager._client.acknowledge.side_effect = error - manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) + + assert "The front fell off" in caplog.text + + +def test_send_unary_modack_api_call_error(caplog, modify_google_logger_propagation): + caplog.set_level(logging.DEBUG) + + manager = make_manager() + + error = exceptions.GoogleAPICallError("The front fell off") + manager._client.modify_ack_deadline.side_effect = error + + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=futures.Future(), + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=futures.Future(), + ), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) assert "The front fell off" in caplog.text -def test_send_unary_retry_error(caplog): +def test_send_unary_ack_retry_error_exactly_once_disabled_no_futures( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() - manager._UNARY_REQUESTS = True + manager._exactly_once_enabled = False error = exceptions.RetryError( "Too long a transient error", cause=Exception("Out of time!") ) manager._client.acknowledge.side_effect = error + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + } with pytest.raises(exceptions.RetryError): - manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + manager.send_unary_ack( + ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict + ) - assert "RetryError while sending unary RPC" in caplog.text + assert "RetryError while sending ack RPC" in caplog.text assert "signaled streaming pull manager shutdown" in caplog.text -def test_send_streaming(): - manager = make_manager() - manager._UNARY_REQUESTS = False - manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) +def test_send_unary_ack_retry_error_exactly_once_disabled_with_futures( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) - manager.send(mock.sentinel.request) + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = False - 
manager._rpc.send.assert_called_once_with(mock.sentinel.request) + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.acknowledge.side_effect = error + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_ack( + ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict + ) -def test_heartbeat(): - manager = make_manager() - manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - manager._rpc.is_active = True + assert "RetryError while sending ack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS - manager.heartbeat() - manager._rpc.send.assert_called_once_with(types.StreamingPullRequest()) +def test_send_unary_ack_retry_error_exactly_once_enabled_no_futures( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True -def test_heartbeat_inactive(): - manager = make_manager() - manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - manager._rpc.is_active = False + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.acknowledge.side_effect = error - manager.heartbeat() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_ack( + ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict + ) + + assert "RetryError while sending ack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + + +def test_send_unary_ack_retry_error_exactly_once_enabled_with_futures( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.acknowledge.side_effect = error + + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_ack( + ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict + ) + + assert "RetryError while sending ack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + assert isinstance(future1.exception(), subscriber_exceptions.AcknowledgeError) + assert ( + future1.exception().error_code is subscriber_exceptions.AcknowledgeStatus.OTHER + ) + assert 
isinstance(future2.exception(), subscriber_exceptions.AcknowledgeError) + assert ( + future2.exception().error_code is subscriber_exceptions.AcknowledgeStatus.OTHER + ) + + +def test_send_unary_modack_retry_error_exactly_once_disabled_no_future( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = False + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=None) + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) + + assert "RetryError while sending modack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text - manager._rpc.send.assert_not_called() + +def test_send_unary_modack_retry_error_exactly_once_disabled_with_futures( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = False + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=future) + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) + + assert "RetryError while sending modack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + assert future.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_send_unary_modack_retry_error_exactly_once_enabled_no_futures( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=None) + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) + + assert "RetryError while sending modack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + + +def test_send_unary_modack_retry_error_exactly_once_enabled_with_futures( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=future) + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) + + assert "RetryError while sending modack 
RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + assert isinstance(future.exception(), subscriber_exceptions.AcknowledgeError) + assert ( + future.exception().error_code is subscriber_exceptions.AcknowledgeStatus.OTHER + ) + + +def test_heartbeat(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = True + + result = manager.heartbeat() + + manager._rpc.send.assert_called_once_with(gapic_types.StreamingPullRequest()) + assert result + + +def test_heartbeat_inactive(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = False + + manager.heartbeat() + + result = manager._rpc.send.assert_not_called() + assert not result + + +def test_heartbeat_stream_ack_deadline_seconds( + caplog, modify_google_logger_propagation +): + caplog.set_level(logging.DEBUG) + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = True + # Send new ack deadline with next heartbeat. + manager._send_new_ack_deadline = True + + result = manager.heartbeat() + + manager._rpc.send.assert_called_once_with( + gapic_types.StreamingPullRequest(stream_ack_deadline_seconds=60) + ) + assert result + # Set to false after a send is initiated. + assert not manager._send_new_ack_deadline + assert "Sending new ack_deadline of 60 seconds." in caplog.text @mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) @@ -475,7 +1323,10 @@ def test_heartbeat_inactive(): def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): manager = make_manager() - manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) + with mock.patch.object( + type(manager), "ack_deadline", new=mock.PropertyMock(return_value=18) + ): + manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) heartbeater.assert_called_once_with(manager) heartbeater.return_value.start.assert_called_once() @@ -489,21 +1340,28 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi leaser.return_value.start.assert_called_once() assert manager.leaser == leaser.return_value - background_consumer.assert_called_once_with(manager._rpc, manager._on_response) + if streaming_pull_manager._SHOULD_USE_ON_FATAL_ERROR_CALLBACK: + background_consumer.assert_called_once_with( + manager._rpc, manager._on_response, manager._on_fatal_exception + ) + else: + background_consumer.assert_called_once_with(manager._rpc, manager._on_response) + background_consumer.return_value.start.assert_called_once() assert manager._consumer == background_consumer.return_value resumable_bidi_rpc.assert_called_once_with( - start_rpc=manager._client.api.streaming_pull, + start_rpc=manager._client.streaming_pull, initial_request=mock.ANY, should_recover=manager._should_recover, should_terminate=manager._should_terminate, + metadata=manager._stream_metadata, throttle_reopen=True, ) initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] assert initial_request_arg.func == manager._get_initial_request - assert initial_request_arg.args[0] == 10 # the default stream ACK timeout - assert not manager._client.api.get_subscription.called + assert initial_request_arg.args[0] == 60 + assert not manager._client.get_subscription.called resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( manager._on_rpc_done @@ -531,14 +1389,17 @@ def test_open_has_been_closed(): 
manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) -def make_running_manager(): - manager = make_manager() +def make_running_manager( + enable_open_telemetry: bool = False, + subscription_name: str = "subscription-name", + **kwargs, +): + manager = make_manager(enable_open_telemetry, subscription_name, **kwargs) manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_active = True manager._dispatcher = mock.create_autospec(dispatcher.Dispatcher, instance=True) manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) manager._heartbeater = mock.create_autospec(heartbeater.Heartbeater, instance=True) - return ( manager, manager._consumer, @@ -549,6 +1410,19 @@ def make_running_manager(): ) +def await_manager_shutdown(manager, timeout=None): + # NOTE: This method should be called after manager.close(), i.e. after the shutdown + # thread has been created and started. + shutdown_thread = manager._regular_shutdown_thread + + if shutdown_thread is None: # pragma: NO COVER + raise Exception("Shutdown thread does not exist on the manager instance.") + + shutdown_thread.join(timeout=timeout) + if shutdown_thread.is_alive(): # pragma: NO COVER + pytest.fail("Shutdown not completed in time.") + + def test_close(): ( manager, @@ -560,6 +1434,7 @@ def test_close(): ) = make_running_manager() manager.close() + await_manager_shutdown(manager, timeout=3) consumer.stop.assert_called_once() leaser.stop.assert_called_once() @@ -570,6 +1445,31 @@ def test_close(): assert manager.is_active is False +def test_closes_on_fatal_consumer_error(): + ( + manager, + consumer, + dispatcher, + leaser, + heartbeater, + scheduler, + ) = make_running_manager() + + if streaming_pull_manager._SHOULD_USE_ON_FATAL_ERROR_CALLBACK: + error = ValueError("some fatal exception") + manager._on_fatal_exception(error) + + await_manager_shutdown(manager, timeout=3) + + consumer.stop.assert_called_once() + leaser.stop.assert_called_once() + dispatcher.stop.assert_called_once() + heartbeater.stop.assert_called_once() + scheduler.shutdown.assert_called_once() + + assert manager.is_active is False + + def test_close_inactive_consumer(): ( manager, @@ -582,6 +1482,7 @@ def test_close_inactive_consumer(): consumer.is_active = False manager.close() + await_manager_shutdown(manager, timeout=3) consumer.stop.assert_not_called() leaser.stop.assert_called_once() @@ -595,6 +1496,7 @@ def test_close_idempotent(): manager.close() manager.close() + await_manager_shutdown(manager, timeout=3) assert scheduler.shutdown.call_count == 1 @@ -620,14 +1522,14 @@ def _do_work(self): while not self._stop: try: self._manager.leaser.add([mock.Mock()]) - except Exception as exc: + except Exception as exc: # pragma: NO COVER self._error_callback(exc) time.sleep(0.1) # also try to interact with the leaser after the stop flag has been set try: self._manager.leaser.remove([mock.Mock()]) - except Exception as exc: + except Exception as exc: # pragma: NO COVER self._error_callback(exc) @@ -639,6 +1541,7 @@ def test_close_no_dispatcher_error(): dispatcher.start() manager.close() + await_manager_shutdown(manager, timeout=3) error_callback.assert_not_called() @@ -650,10 +1553,112 @@ def test_close_callbacks(): manager.add_close_callback(callback) manager.close(reason="meep") + await_manager_shutdown(manager, timeout=3) callback.assert_called_once_with(manager, "meep") +def test_close_blocking_scheduler_shutdown(): + manager, _, _, _, _, _ = make_running_manager(await_callbacks_on_shutdown=True) + 
+    scheduler = manager._scheduler
+
+    manager.close()
+    await_manager_shutdown(manager, timeout=3)
+
+    scheduler.shutdown.assert_called_once_with(await_msg_callbacks=True)
+
+
+def test__on_response_none_scheduler():
+    manager, _, _, leaser, _, _ = make_running_manager()
+
+    manager._callback = mock.sentinel.callback
+    manager._scheduler = None
+    # Set up the messages.
+    response = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="ack1",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            ),
+            gapic_types.ReceivedMessage(
+                ack_id="ack2",
+                message=gapic_types.PubsubMessage(data=b"bar", message_id="2"),
+                delivery_attempt=6,
+            ),
+        ]
+    )
+
+    manager._maybe_release_messages = mock.Mock()
+
+    # adjust message bookkeeping in leaser
+    fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42)
+    manager._on_response(response)
+
+    # With no scheduler, the response is dropped without releasing messages.
+    manager._maybe_release_messages.assert_not_called()
+
+
+def test__on_response_none_leaser():
+    manager, _, _, _, _, _ = make_running_manager()
+
+    manager._callback = mock.sentinel.callback
+    manager._leaser = None
+    # Set up the messages.
+    response = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="ack1",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            ),
+            gapic_types.ReceivedMessage(
+                ack_id="ack2",
+                message=gapic_types.PubsubMessage(data=b"bar", message_id="2"),
+                delivery_attempt=6,
+            ),
+        ]
+    )
+
+    manager._maybe_release_messages = mock.Mock()
+
+    manager._on_response(response)
+
+    # With no leaser, the response is likewise dropped.
+    manager._maybe_release_messages.assert_not_called()
+
+
+def test_close_nonblocking_scheduler_shutdown():
+    manager, _, _, _, _, _ = make_running_manager(await_callbacks_on_shutdown=False)
+    scheduler = manager._scheduler
+
+    manager.close()
+    await_manager_shutdown(manager, timeout=3)
+
+    scheduler.shutdown.assert_called_once_with(await_msg_callbacks=False)
+
+
+def test_close_nacks_internally_queued_messages():
+    nacked_messages = []
+
+    def fake_nack(self):
+        nacked_messages.append(self.data)
+
+    messages = [
+        create_message(data=b"msg1"),
+        create_message(data=b"msg2"),
+        create_message(data=b"msg3"),
+    ]
+    for msg in messages:
+        msg.nack = stdlib_types.MethodType(fake_nack, msg)
+
+    manager, _, _, _, _, _ = make_running_manager()
+    dropped_by_scheduler = messages[:2]
+    manager._scheduler.shutdown.return_value = dropped_by_scheduler
+    manager._messages_on_hold._messages_on_hold.append(messages[2])
+
+    manager.close()
+    await_manager_shutdown(manager, timeout=3)
+
+    assert sorted(nacked_messages) == [b"msg1", b"msg2", b"msg3"]
+
+
 def test__get_initial_request():
     manager = make_manager()
     manager._leaser = mock.create_autospec(leaser.Leaser, instance=True)
@@ -661,11 +1666,11 @@ def test__get_initial_request():

     initial_request = manager._get_initial_request(123)

-    assert isinstance(initial_request, types.StreamingPullRequest)
+    assert isinstance(initial_request, gapic_types.StreamingPullRequest)
     assert initial_request.subscription == "subscription-name"
     assert initial_request.stream_ack_deadline_seconds == 123
-    assert initial_request.modify_deadline_ack_ids == ["1", "2"]
-    assert initial_request.modify_deadline_seconds == [10, 10]
+    assert initial_request.modify_deadline_ack_ids == []
+    assert initial_request.modify_deadline_seconds == []


 def test__get_initial_request_wo_leaser():
@@ -674,7 +1679,7 @@ def test__get_initial_request_wo_leaser():

     initial_request = manager._get_initial_request(123)

-    assert isinstance(initial_request, types.StreamingPullRequest)
+    assert isinstance(initial_request, gapic_types.StreamingPullRequest)
     assert initial_request.subscription == "subscription-name"
     assert initial_request.stream_ack_deadline_seconds == 123
     assert initial_request.modify_deadline_ack_ids == []
@@ -686,14 +1691,15 @@ def test__on_response_delivery_attempt():
     manager._callback = mock.sentinel.callback

     # Set up the messages.
-    response = types.StreamingPullResponse(
+    response = gapic_types.StreamingPullResponse(
         received_messages=[
-            types.ReceivedMessage(
-                ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1")
+            gapic_types.ReceivedMessage(
+                ack_id="fack",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
             ),
-            types.ReceivedMessage(
+            gapic_types.ReceivedMessage(
                 ack_id="back",
-                message=types.PubsubMessage(data=b"bar", message_id="2"),
+                message=gapic_types.PubsubMessage(data=b"bar", message_id="2"),
                 delivery_attempt=6,
             ),
         ]
@@ -712,18 +1718,176 @@ def test__on_response_delivery_attempt():
     assert msg2.delivery_attempt == 6


+def test__on_response_modifies_ack_deadline():
+    manager, _, dispatcher, leaser, _, scheduler = make_running_manager()
+    manager._callback = mock.sentinel.callback
+
+    # Set up the messages.
+    response = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="ack_1",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            ),
+            gapic_types.ReceivedMessage(
+                ack_id="ack_2",
+                message=gapic_types.PubsubMessage(data=b"bar", message_id="2"),
+            ),
+        ]
+    )
+
+    # adjust message bookkeeping in leaser
+    fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=80)
+
+    # Actually run the method and check that the correct MODACK value is used.
+    with mock.patch.object(
+        type(manager), "ack_deadline", new=mock.PropertyMock(return_value=18)
+    ):
+        manager._on_response(response)
+
+    dispatcher.modify_ack_deadline.assert_called_once_with(
+        [
+            requests.ModAckRequest("ack_1", 18, None),
+            requests.ModAckRequest("ack_2", 18, None),
+        ],
+        18,
+    )
+
+
+def test__on_response_modifies_ack_deadline_with_exactly_once_min_lease():
+    # exactly_once is disabled by default.
+    manager, _, dispatcher, leaser, _, scheduler = make_running_manager()
+    manager._callback = mock.sentinel.callback
+    complete_modify_ack_deadline_calls(dispatcher)
+
+    # make p99 value smaller than exactly_once min lease
+    manager.ack_histogram.add(10)
+
+    # adjust message bookkeeping in leaser
+    fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42)
+
+    # Set up the response with the first set of messages and exactly_once not
+    # enabled.
+    response1 = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="ack_1",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            ),
+            gapic_types.ReceivedMessage(
+                ack_id="ack_2",
+                message=gapic_types.PubsubMessage(data=b"bar", message_id="2"),
+            ),
+        ],
+        subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties(
+            exactly_once_delivery_enabled=False
+        ),
+    )
+
+    # Set up the response with the second set of messages and exactly_once enabled.
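+    # Once the manager sees exactly_once_delivery_enabled=True it should switch
+    # from the histogram-derived lease to the 60-second minimum used for
+    # exactly-once subscriptions, which the assertions below verify.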
+    response2 = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="ack_3",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            ),
+            gapic_types.ReceivedMessage(
+                ack_id="ack_4",
+                message=gapic_types.PubsubMessage(data=b"bar", message_id="2"),
+            ),
+        ],
+        subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties(
+            exactly_once_delivery_enabled=True
+        ),
+    )
+
+    # exactly_once is still disabled b/c subscription_properties says so
+    manager._on_response(response1)
+
+    # expect mod-acks are called with histogram-based lease value
+    assert len(dispatcher.modify_ack_deadline.mock_calls) == 1
+    call = dispatcher.modify_ack_deadline.mock_calls[0]
+    assert call.args[0] == [
+        requests.ModAckRequest("ack_1", 10, None),
+        requests.ModAckRequest("ack_2", 10, None),
+    ]
+    assert call.args[1] == 10
+
+    # exactly_once should be enabled after this request b/c subscription_properties says so
+    manager._on_response(response2)
+
+    # expect mod-acks called with 60 sec min lease value for exactly_once subscriptions
+    # ignore the futures here
+    assert len(dispatcher.modify_ack_deadline.mock_calls) == 2
+    call = dispatcher.modify_ack_deadline.mock_calls[1]
+    modack_reqs = call.args[0]
+    assert modack_reqs[0].ack_id == "ack_3"
+    assert modack_reqs[0].seconds == 60
+    assert modack_reqs[1].ack_id == "ack_4"
+    assert modack_reqs[1].seconds == 60
+    modack_deadline = call.args[1]
+    assert modack_deadline == 60
+
+
+def test__on_response_send_ack_deadline_after_enabling_exactly_once():
+    # exactly_once is disabled by default.
+    manager, _, dispatcher, leaser, _, scheduler = make_running_manager()
+    manager._callback = mock.sentinel.callback
+    complete_modify_ack_deadline_calls(dispatcher)
+
+    # set up an active RPC
+    manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True)
+    manager._rpc.is_active = True
+
+    # make p99 value smaller than exactly_once min lease
+    manager.ack_histogram.add(10)
+
+    # adjust message bookkeeping in leaser
+    fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42)
+
+    # Set up the response with a message and exactly_once enabled.
+    response2 = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="ack_1",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            )
+        ],
+        subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties(
+            exactly_once_delivery_enabled=True
+        ),
+    )
+
+    # exactly_once should be enabled after this request b/c subscription_properties says so.
+    # Whenever exactly_once flips on or off, the manager schedules a new
+    # stream ack_deadline to be sent via the next heartbeat, which the
+    # heartbeat assertions below verify.
+    manager._on_response(response2)
+
+    # simulate periodic heartbeat trigger
+    heartbeat_request_sent = manager.heartbeat()
+    assert heartbeat_request_sent
+
+    # heartbeat request is sent with the 60 sec min lease value for exactly_once subscriptions
+    manager._rpc.send.assert_called_once_with(
+        gapic_types.StreamingPullRequest(stream_ack_deadline_seconds=60)
+    )
+
+
 def test__on_response_no_leaser_overload():
     manager, _, dispatcher, leaser, _, scheduler = make_running_manager()
     manager._callback = mock.sentinel.callback

     # Set up the messages.
- response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( - ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") + gapic_types.ReceivedMessage( + ack_id="back", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), ), ] ) @@ -736,7 +1900,11 @@ def test__on_response_no_leaser_overload(): manager._on_response(response) dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest("fack", 10), requests.ModAckRequest("back", 10)] + [ + requests.ModAckRequest("fack", 10, None), + requests.ModAckRequest("back", 10, None), + ], + 10, ) schedule_calls = scheduler.schedule.mock_calls @@ -754,16 +1922,19 @@ def test__on_response_with_leaser_overload(): manager._callback = mock.sentinel.callback # Set up the messages. - response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( - ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") + gapic_types.ReceivedMessage( + ack_id="back", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), ), - types.ReceivedMessage( - ack_id="zack", message=types.PubsubMessage(data=b"baz", message_id="3") + gapic_types.ReceivedMessage( + ack_id="zack", + message=gapic_types.PubsubMessage(data=b"baz", message_id="3"), ), ] ) @@ -780,10 +1951,11 @@ def test__on_response_with_leaser_overload(): # deadline extended, even those not dispatched to callbacks dispatcher.modify_ack_deadline.assert_called_once_with( [ - requests.ModAckRequest("fack", 10), - requests.ModAckRequest("back", 10), - requests.ModAckRequest("zack", 10), - ] + requests.ModAckRequest("fack", 10, None), + requests.ModAckRequest("back", 10, None), + requests.ModAckRequest("zack", 10, None), + ], + 10, ) # one message should be scheduled, the flow control limits allow for it @@ -805,7 +1977,7 @@ def test__on_response_with_leaser_overload(): assert msg.message_id in ("2", "3") -def test__on_response_none_data(caplog): +def test__on_response_none_data(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager, _, dispatcher, leaser, _, scheduler = make_running_manager() @@ -825,23 +1997,23 @@ def test__on_response_with_ordering_keys(): manager._callback = mock.sentinel.callback # Set up the messages. 
- response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="fack", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"foo", message_id="1", ordering_key="" ), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="back", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"bar", message_id="2", ordering_key="key1" ), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="zack", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"baz", message_id="3", ordering_key="key1" ), ), @@ -860,10 +2032,11 @@ def test__on_response_with_ordering_keys(): # deadline extended, even those not dispatched to callbacks. dispatcher.modify_ack_deadline.assert_called_once_with( [ - requests.ModAckRequest("fack", 10), - requests.ModAckRequest("back", 10), - requests.ModAckRequest("zack", 10), - ] + requests.ModAckRequest("fack", 10, None), + requests.ModAckRequest("back", 10, None), + requests.ModAckRequest("zack", 10, None), + ], + 10, ) # The first two messages should be scheduled, The third should be put on @@ -901,18 +2074,221 @@ def test__on_response_with_ordering_keys(): assert manager._messages_on_hold.get() is None -def test_retryable_stream_errors(): - # Make sure the config matches our hard-coded tuple of exceptions. - interfaces = subscriber_client_config.config["interfaces"] - retry_codes = interfaces["google.pubsub.v1.Subscriber"]["retry_codes"] - idempotent = retry_codes["idempotent"] +def test__on_response_enable_exactly_once(): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + complete_modify_ack_deadline_calls(dispatcher) + + # Set up the messages. + response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ) + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=True + ), + ) + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + + # exactly_once should be enabled + manager._on_response(response) + + assert manager._exactly_once_delivery_enabled() + # new deadline for exactly_once subscriptions should be used + assert manager.ack_deadline == 60 + + +def test__on_response_disable_exactly_once(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback - status_codes = tuple(getattr(grpc.StatusCode, name, None) for name in idempotent) - expected = tuple( - exceptions.exception_class_for_grpc_status(status_code) - for status_code in status_codes + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE ) - assert set(expected).issubset(set(streaming_pull_manager._RETRYABLE_STREAM_ERRORS)) + # enable exactly_once + manager._exactly_once_enabled = True + + # Set up the messages. 
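+    # The response below reports exactly_once_delivery_enabled=False, which
+    # should flip the manager back and relax the 60-second lease floor.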
+    response = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="fack",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            )
+        ],
+        subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties(
+            exactly_once_delivery_enabled=False
+        ),
+    )
+
+    # adjust message bookkeeping in leaser
+    fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42)
+
+    # exactly_once should be disabled
+    manager._on_response(response)
+
+    assert not manager._exactly_once_enabled
+    # The deadline configured in flow control should be used, not the
+    # exactly_once minimum since exactly_once has been disabled.
+    deadline = manager._obtain_ack_deadline(maybe_update=True)
+    assert deadline == histogram.MIN_ACK_DEADLINE
+    assert manager._stream_ack_deadline == 60
+
+
+def test__on_response_exactly_once_immediate_modacks_fail(
+    caplog,
+    modify_google_logger_propagation,
+):
+    manager, _, dispatcher, leaser, _, scheduler = make_running_manager()
+    manager._callback = mock.sentinel.callback
+
+    def complete_futures_with_error(*args, **kwargs):
+        modack_requests = args[0]
+        for req in modack_requests:
+            if req.ack_id == "fack":
+                req.future.set_exception(
+                    subscriber_exceptions.AcknowledgeError(
+                        subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID, None
+                    )
+                )
+            else:
+                req.future.set_exception(
+                    subscriber_exceptions.AcknowledgeError(
+                        subscriber_exceptions.AcknowledgeStatus.SUCCESS, None
+                    )
+                )
+
+    dispatcher.modify_ack_deadline.side_effect = complete_futures_with_error
+
+    # Set up the messages.
+    response = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="fack",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            ),
+            gapic_types.ReceivedMessage(
+                ack_id="good",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="2"),
+            ),
+        ],
+        subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties(
+            exactly_once_delivery_enabled=True
+        ),
+    )
+
+    # Actually run the method and prove that modack and schedule are called in
+    # the expected way.
+
+    fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10)
+
+    with caplog.at_level(logging.WARNING):
+        manager._on_response(response)
+
+    # Only the second message should be scheduled; the first failed its
+    # immediate modack with INVALID_ACK_ID and is dropped.
+
+    schedule_calls = scheduler.schedule.mock_calls
+    assert len(schedule_calls) == 1
+    call_args = schedule_calls[0][1]
+    assert call_args[0] == mock.sentinel.callback
+    assert isinstance(call_args[1], message.Message)
+    assert call_args[1].message_id == "2"
+
+    assert manager._messages_on_hold.size == 0
+
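+    # Exactly one warning should have been logged for the INVALID_ACK_ID
+    # modack failure above.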
+    expected_warnings = [
+        record.message.lower()
+        for record in caplog.records
+        if "AcknowledgeError when lease-modacking a message." in record.message
+    ]
+    assert len(expected_warnings) == 1
+
+    # No messages available
+    assert manager._messages_on_hold.get() is None
+
+    # the dropped message does not count toward the load
+    assert manager.load == 0.001
+
+
+def test__on_response_exactly_once_immediate_modacks_fail_non_invalid(
+    caplog, modify_google_logger_propagation
+):
+    manager, _, dispatcher, leaser, _, scheduler = make_running_manager()
+    manager._callback = mock.sentinel.callback
+
+    def complete_futures_with_error(*args, **kwargs):
+        modack_requests = args[0]
+        for req in modack_requests:
+            if req.ack_id == "fack":
+                req.future.set_exception(
+                    subscriber_exceptions.AcknowledgeError(
+                        subscriber_exceptions.AcknowledgeStatus.OTHER, None
+                    )
+                )
+            else:
+                req.future.set_exception(
+                    subscriber_exceptions.AcknowledgeError(
+                        subscriber_exceptions.AcknowledgeStatus.SUCCESS, None
+                    )
+                )
+
+    dispatcher.modify_ack_deadline.side_effect = complete_futures_with_error
+
+    # Set up the messages.
+    response = gapic_types.StreamingPullResponse(
+        received_messages=[
+            gapic_types.ReceivedMessage(
+                ack_id="fack",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="1"),
+            ),
+            gapic_types.ReceivedMessage(
+                ack_id="good",
+                message=gapic_types.PubsubMessage(data=b"foo", message_id="2"),
+            ),
+        ],
+        subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties(
+            exactly_once_delivery_enabled=True
+        ),
+    )
+
+    # Actually run the method and prove that modack and schedule are called in
+    # the expected way.
+
+    fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10)
+
+    with caplog.at_level(logging.WARNING):
+        manager._on_response(response)
+
+    # Both messages should be scheduled: a modack failure other than
+    # INVALID_ACK_ID does not drop the message.
+
+    schedule_calls = scheduler.schedule.mock_calls
+    assert len(schedule_calls) == 2
+    call_args = schedule_calls[0][1]
+    assert call_args[0] == mock.sentinel.callback
+    assert isinstance(call_args[1], message.Message)
+    assert call_args[1].message_id == "1"
+
+    assert manager._messages_on_hold.size == 0
+
+    expected_warnings = [
+        record.message.lower()
+        for record in caplog.records
+        if "AcknowledgeError when lease-modacking a message." in record.message
+    ]
+    assert len(expected_warnings) == 2
+
+    # No messages available
+    assert manager._messages_on_hold.get() is None
+
+    # both messages count toward the load (2 / 1000 == 0.002)
+    assert manager.load == 0.002


 def test__should_recover_true():
@@ -935,18 +2311,24 @@ def test__should_terminate_true():
 def test__should_terminate_true():
     manager = make_manager()

-    details = "Cancelled. Go away, before I taunt you a second time."
-    exc = exceptions.Cancelled(details)
-
-    assert manager._should_terminate(exc) is True
+    for exc in [
+        exceptions.Cancelled(""),
+        exceptions.PermissionDenied(""),
+        TypeError(),
+        ValueError(),
+    ]:
+        assert manager._should_terminate(exc)


 def test__should_terminate_false():
     manager = make_manager()

-    exc = TypeError("wahhhhhh")
-
-    assert manager._should_terminate(exc) is False
+    for exc in [
+        exceptions.ResourceExhausted(""),
+        exceptions.ServiceUnavailable(""),
+        exceptions.DeadlineExceeded(""),
+    ]:
+        assert not manager._should_terminate(exc)


 @mock.patch("threading.Thread", autospec=True)
@@ -956,8 +2338,12 @@ def test__on_rpc_done(thread):
     manager._on_rpc_done(mock.sentinel.error)

     thread.assert_called_once_with(
-        name=mock.ANY, target=manager.close, kwargs={"reason": mock.sentinel.error}
+        name=mock.ANY, target=manager._shutdown, kwargs={"reason": mock.ANY}
     )
+    _, kwargs = thread.call_args
+    reason = kwargs["kwargs"]["reason"]
+    assert isinstance(reason, Exception)
+    assert reason.args == (mock.sentinel.error,)  # Exception wraps the original error


 def test_activate_ordering_keys():
@@ -971,3 +2357,624 @@ def test_activate_ordering_keys():
     manager._messages_on_hold.activate_ordering_keys.assert_called_once_with(
         ["key1", "key2"], mock.ANY
     )
+
+
+def test_activate_ordering_keys_stopped_scheduler():
+    manager = make_manager()
+    manager._messages_on_hold = mock.create_autospec(
+        messages_on_hold.MessagesOnHold, instance=True
+    )
+    manager._scheduler = None
+
+    manager.activate_ordering_keys(["key1", "key2"])
+
+    manager._messages_on_hold.activate_ordering_keys.assert_not_called()
+
+
+@mock.patch("grpc_status.rpc_status.from_call")
+@mock.patch("google.protobuf.any_pb2.Any.Unpack")
+def test_get_ack_errors_unable_to_unpack(unpack, from_call):
+    # NOTE: stacked mock.patch decorators are applied bottom-up, so the
+    # innermost decorator (Any.Unpack) supplies the first argument.
+    st = status_pb2.Status()
+    st.code = code_pb2.Code.INTERNAL
+    st.message = "qmsg"
+    error_info = error_details_pb2.ErrorInfo()
+    error_info.metadata["ack_1"] = "error1"
+    st.details.add().Pack(error_info)
+    mock_gprc_call = mock.Mock(spec=grpc.Call)
+    exception = exceptions.InternalServerError(
+        "msg", errors=(), response=mock_gprc_call
+    )
+    from_call.return_value = st
+    # Unpack() failed
+    unpack.return_value = None
+
+    assert not streaming_pull_manager._get_ack_errors(exception)
+
+
+@mock.patch("grpc_status.rpc_status.from_call")
+def test_get_ack_errors_no_response_obj(from_call):
+    exception = exceptions.InternalServerError("msg", errors=(), response=None)
+    # No response obj
+    assert not streaming_pull_manager._get_ack_errors(exception)
+
+
+@mock.patch("grpc_status.rpc_status.from_call")
+def test_get_ack_errors_from_call_returned_none(from_call):
+    mock_gprc_call = mock.Mock(spec=grpc.Call)
+    exception = exceptions.InternalServerError(
+        "msg", errors=(), response=mock_gprc_call
+    )
+    from_call.return_value = None
+    # rpc_status.from_call() returned None
+    assert not streaming_pull_manager._get_ack_errors(exception)
+
+
+@mock.patch("grpc_status.rpc_status.from_call")
+def test_get_ack_errors_value_error_thrown(from_call):
+    mock_gprc_call = mock.Mock(spec=grpc.Call)
+    exception = exceptions.InternalServerError(
+        "msg", errors=(), response=mock_gprc_call
+    )
+    from_call.side_effect = ValueError("val error msg")
+    # ValueError thrown, so return None
+    assert not streaming_pull_manager._get_ack_errors(exception)
+
+
+@mock.patch("grpc_status.rpc_status.from_call")
+def test_get_ack_errors_no_error_details(from_call):
+    st = status_pb2.Status()
+    st.code = code_pb2.Code.INTERNAL
+    st.message = "qmsg"
+    mock_gprc_call = 
mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.side_effect = None + from_call.return_value = st + # status has no details to extract exactly-once error info from + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_detail_not_error_info(from_call): + st = status_pb2.Status() + st.code = code_pb2.Code.INTERNAL + st.message = "qmsg" + # pack a dummy status instead of an ErrorInfo + dummy_status = status_pb2.Status() + st.details.add().Pack(dummy_status) + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.side_effect = None + from_call.return_value = st + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_happy_case(from_call): + st = status_pb2.Status() + st.code = code_pb2.Code.INTERNAL + st.message = "qmsg" + error_info = error_details_pb2.ErrorInfo() + error_info.metadata["ack_1"] = "error1" + st.details.add().Pack(error_info) + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.side_effect = None + from_call.return_value = st + # happy case - errors returned in a map + ack_errors = streaming_pull_manager._get_ack_errors(exception) + assert ack_errors + assert ack_errors["ack_1"] == "error1" + + +def test_process_requests_no_requests(): + # no requests so no items in results lists + ack_reqs_dict = {} + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert not requests_completed + assert not requests_to_retry + + +def test_process_requests_error_dict_is_none(): + # it's valid to pass in `None` for `errors_dict` + ack_reqs_dict = {} + errors_dict = None + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert not requests_completed + assert not requests_to_retry + + +def test_process_requests_no_errors_has_no_future(): + # no errors so request should be completed, even with no future + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + +def test_process_requests_no_errors(): + # no errors so request and its future should be completed + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert future.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert not requests_to_retry + + +def test_process_requests_no_errors_no_future(): + # no errors, request should be completed, even when future is None. 
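+    # (Futures are optional on these requests; _process_requests only resolves
+    # a future when one is attached.)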
+ ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + +def test_process_requests_permanent_error_raises_exception(): + # a permanent error raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "PERMANENT_FAILURE_INVALID_ACK_ID"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID + ) + assert not requests_to_retry + + +def test_process_requests_permanent_error_other_raises_exception(): + # a permanent error of other raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "PERMANENT_FAILURE_OTHER"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert exc_info.value.error_code == subscriber_exceptions.AcknowledgeStatus.OTHER + assert not requests_to_retry + + +def test_process_requests_permanent_error_other_raises_exception_no_future(): + # with a permanent error, request is completed even when future is None. 
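+    # With no future attached there is nowhere to surface the AcknowledgeError,
+    # so the request is simply recorded as completed.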
+ ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + errors_dict = {"ackid1": "PERMANENT_FAILURE_OTHER"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + +def test_process_requests_transient_error_returns_request_for_retrying(): + # a transient error returns the request in `requests_to_retry` + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "TRANSIENT_FAILURE_INVALID_ACK_ID"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert not requests_completed + assert requests_to_retry[0].ack_id == "ackid1" + assert not future.done() + + +def test_process_requests_unknown_error_raises_exception(): + # an unknown error raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "unknown_error"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert exc_info.value.error_code == subscriber_exceptions.AcknowledgeStatus.OTHER + assert exc_info.value.info == "unknown_error" + assert not requests_to_retry + + +def test_process_requests_retriable_error_status_returns_request_for_retrying(): + # a retriable error status returns the request in `requests_to_retry` + retriable_errors = [ + code_pb2.DEADLINE_EXCEEDED, + code_pb2.RESOURCE_EXHAUSTED, + code_pb2.ABORTED, + code_pb2.INTERNAL, + code_pb2.UNAVAILABLE, + ] + + for retriable_error in retriable_errors: + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future, + ) + } + st = status_pb2.Status() + st.code = retriable_error + ( + requests_completed, + requests_to_retry, + ) = streaming_pull_manager._process_requests(st, ack_reqs_dict, None) + assert not requests_completed + assert requests_to_retry[0].ack_id == "ackid1" + assert not future.done() + + +def test_process_requests_permission_denied_error_status_raises_exception(): + # a permission-denied error status raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.PERMISSION_DENIED + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.PERMISSION_DENIED + ) + assert exc_info.value.info is None + assert not requests_to_retry + + +def test_process_requests_failed_precondition_error_status_raises_exception(): + # a failed-precondition error status raises an exception + future = futures.Future() + 
ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.FAILED_PRECONDITION + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.FAILED_PRECONDITION + ) + assert exc_info.value.info is None + assert not requests_to_retry + + +def test_process_requests_other_error_status_raises_exception(): + # an unrecognized error status raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.OUT_OF_RANGE + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert exc_info.value.error_code == subscriber_exceptions.AcknowledgeStatus.OTHER + assert not requests_to_retry + + +def test_process_requests_other_error_status_raises_exception_no_future(): + # with an unrecognized error status, requests are completed, even when + # future is None. + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.OUT_OF_RANGE + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + +def test_process_requests_mixed_success_and_failure_acks(): + # mixed success and failure (acks) + future1 = futures.Future() + future2 = futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ackid2": requests.AckRequest( + ack_id="ackid2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + "ackid3": requests.AckRequest( + ack_id="ackid3", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future3, + ), + } + errors_dict = { + "ackid1": "PERMANENT_FAILURE_INVALID_ACK_ID", + "ackid2": "TRANSIENT_FAILURE_INVALID_ACK_ID", + } + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + # message with ack_id 'ackid1' fails with an exception + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future1.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID + ) + # message with ack_id 'ackid2' is to be retried + assert requests_to_retry[0].ack_id == "ackid2" + assert not requests_to_retry[0].future.done() + # message with ack_id 'ackid3' succeeds + assert requests_completed[1].ack_id == "ackid3" + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_process_requests_mixed_success_and_failure_modacks(): + # mixed success and failure (modacks) + future1 = futures.Future() + future2 = 
futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=future1), + "ackid2": requests.ModAckRequest(ack_id="ackid2", seconds=60, future=future2), + "ackid3": requests.ModAckRequest(ack_id="ackid3", seconds=60, future=future3), + } + errors_dict = { + "ackid1": "PERMANENT_FAILURE_INVALID_ACK_ID", + "ackid2": "TRANSIENT_FAILURE_INVALID_ACK_ID", + } + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + # message with ack_id 'ackid1' fails with an exception + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future1.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID + ) + # message with ack_id 'ackid2' is to be retried + assert requests_to_retry[0].ack_id == "ackid2" + assert not requests_to_retry[0].future.done() + # message with ack_id 'ackid3' succeeds + assert requests_completed[1].ack_id == "ackid3" + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry__on_response_subscribe_span_create(span_exporter): + manager, _, _, leaser, _, _ = make_running_manager( + enable_open_telemetry=True, + subscription_name="projects/projectID/subscriptions/subscriptionID", + ) + + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + manager._callback = mock.sentinel.callback + + response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="ack1", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ), + gapic_types.ReceivedMessage( + ack_id="ack2", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), + delivery_attempt=6, + ), + ] + ) + + manager._on_response(response) + + spans = span_exporter.get_finished_spans() + + # Subscribe span is still active, hence unexported. 
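+    # (The in-memory span exporter only returns spans that have already ended.)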
+ # Subscriber scheduler spans corresponding to the two messages would be started in `messages_on_hold.put()`` + # and ended in `_maybe_release_messages` + assert len(spans) == 3 + modack_span = spans[0] + + for span in spans[1:]: + assert span.name == "subscriber scheduler" + assert span.kind == trace.SpanKind.INTERNAL + assert span.parent is not None + assert len(span.attributes) == 0 + + assert modack_span.name == "subscriptionID modack" + assert modack_span.kind == trace.SpanKind.CLIENT + assert modack_span.parent is None + assert len(modack_span.links) == 2 + + +RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) +RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 +PUBLISHED_MICROS = 123456 +PUBLISHED = RECEIVED + datetime.timedelta(days=1, microseconds=PUBLISHED_MICROS) +PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 + + +def create_message( + data, + ack_id="ACKID", + delivery_attempt=0, + ordering_key="", + exactly_once_delivery_enabled=False, + **attrs, +): # pragma: NO COVER + with mock.patch.object(time, "time") as time_: + time_.return_value = RECEIVED_SECONDS + gapic_pubsub_message = PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 + ), + ordering_key=ordering_key, + ) + msg = Message( + # The code under test uses a raw protobuf PubsubMessage, i.e. w/o additional + # Python class wrappers, hence the "_pb" + message=gapic_pubsub_message._pb, + ack_id=ack_id, + delivery_attempt=delivery_attempt, + request_queue=queue.Queue(), + exactly_once_delivery_enabled_func=lambda: exactly_once_delivery_enabled, + ) + return msg + + +def test_opentelemetry_subscriber_concurrency_control_span(span_exporter): + manager, _, _, leaser, _, _ = make_running_manager( + enable_open_telemetry=True, + subscription_name="projects/projectID/subscriptions/subscriptionID", + ) + manager._callback = mock.Mock() + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + msg.opentelemetry_data = opentelemetry_data + manager._schedule_message_on_hold(msg) + opentelemetry_data.end_subscribe_concurrency_control_span() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + + concurrency_control_span, subscribe_span = spans + assert concurrency_control_span.name == "subscriber concurrency control" + assert subscribe_span.name == "subscriptionID subscribe" + assert opentelemetry_data.subscription_id == "subscriptionID" + + assert concurrency_control_span.parent == subscribe_span.context + + +def test_opentelemetry_subscriber_concurrency_control_span_end(span_exporter): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_subscribe_concurrency_control_span() + msg.opentelemetry_data = opentelemetry_data + streaming_pull_manager._wrap_callback_errors(mock.Mock(), mock.Mock(), msg) + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + + concurrency_control_span = spans[0] + assert 
concurrency_control_span.name == "subscriber concurrency control" + + +def test_opentelemetry_wrap_callback_error(span_exporter): + msg = create_message(b"foo") + streaming_pull_manager._wrap_callback_errors(mock.Mock(), mock.Mock(), msg) + + spans = span_exporter.get_finished_spans() + assert len(spans) == 0 diff --git a/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py b/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py new file mode 100644 index 000000000..2fb89aa7c --- /dev/null +++ b/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py @@ -0,0 +1,202 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import time +import sys +import queue +import pytest + +from google.protobuf import timestamp_pb2 +from google.api_core import datetime_helpers +from opentelemetry import trace +from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextSetter, +) + +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) +from google.cloud.pubsub_v1.subscriber.message import Message +from google.cloud.pubsub_v1.types import PubsubMessage + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + +RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) +RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 +PUBLISHED_MICROS = 123456 +PUBLISHED = RECEIVED + datetime.timedelta(days=1, microseconds=PUBLISHED_MICROS) +PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 + + +def create_message( + data, + ack_id="ACKID", + delivery_attempt=0, + ordering_key="", + exactly_once_delivery_enabled=False, + **attrs +): # pragma: NO COVER + with mock.patch.object(time, "time") as time_: + time_.return_value = RECEIVED_SECONDS + gapic_pubsub_message = PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 + ), + ordering_key=ordering_key, + ) + msg = Message( + # The code under test uses a raw protobuf PubsubMessage, i.e. 
w/o additional + # Python class wrappers, hence the "_pb" + message=gapic_pubsub_message._pb, + ack_id=ack_id, + delivery_attempt=delivery_attempt, + request_queue=queue.Queue(), + exactly_once_delivery_enabled_func=lambda: exactly_once_delivery_enabled, + ) + return msg + + +def test_opentelemetry_set_subscribe_span_result(span_exporter): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.set_subscribe_span_result("acked") + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + + assert len(spans) == 1 + + assert "messaging.gcp_pubsub.result" in spans[0].attributes + assert spans[0].attributes["messaging.gcp_pubsub.result"] == "acked" + + +def test_opentelemetry_set_subscribe_span_result_assert_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.set_subscribe_span_result("hi") + + +def test_opentelemetry_start_subscribe_concurrency_control_span_no_subscribe_span(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.start_subscribe_concurrency_control_span() + + +def test_opentelemetry_end_subscribe_concurrency_control_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.end_subscribe_concurrency_control_span() + + +def test_opentelemetry_start_subscribe_scheduler_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.start_subscribe_scheduler_span() + + +def test_opentelemetry_end_subscribe_scheduler_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.end_subscribe_scheduler_span() + + +def test_opentelemetry_start_process_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.start_process_span() + + +def test_opentelemetry_end_process_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.end_process_span() + + +def test_opentelemetry_start_process_span_publisher_link(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + tracer = trace.get_tracer("foo") + publisher_create_span = None + with tracer.start_as_current_span(name="name") as span: + publisher_create_span = span + TraceContextTextMapPropagator().inject( + carrier=msg._message, + setter=OpenTelemetryContextSetter(), + ) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + assert len(opentelemetry_data._process_span.links) == 1 + assert ( + opentelemetry_data._process_span.links[0].context.span_id + == 
publisher_create_span.get_span_context().span_id + ) + + +def test_opentelemetry_start_process_span_no_publisher_span(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + # Assert that when no context is propagated, the subscriber span has no parent. + assert opentelemetry_data._subscribe_span.parent is None + # Assert that when there is no publisher create span context propagated, + # There are no links created in the process span. + assert len(opentelemetry_data._process_span.links) == 0 + + +def test_opentelemetry_project_id_set_after_create_subscribe_span(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + assert opentelemetry_data.project_id == "projectId" diff --git a/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 310485279..3d3ff0111 100644 --- a/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -12,59 +12,127 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.auth import credentials -import mock +import sys +import grpc + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + +import pytest + +from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY from google.cloud.pubsub_v1 import subscriber -from google.cloud.pubsub_v1.gapic import subscriber_client from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import futures +from google.pubsub_v1.services.subscriber import client as subscriber_client +from google.pubsub_v1.services.subscriber.transports.grpc import SubscriberGrpcTransport +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextGetter, +) +from google.pubsub_v1.types import PubsubMessage -def test_init(): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - assert isinstance(client.api, subscriber_client.SubscriberClient) - +# Attempt to use `_thunk` to obtain the underlying grpc channel from +# the intercept channel. Default to obtaining the grpc channel directly +# for backwards compatibility. +# TODO(https://github.com/grpc/grpc/issues/38519): Workaround to obtain a channel +# until a public API is available. 
+def get_pull_channel(client): + try: + return client._transport.pull._thunk("")._channel + except AttributeError: + return client._transport.pull._channel -def test_init_w_custom_transport(): - transport = object() - client = subscriber.Client(transport=transport) - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert client.api.transport is transport +def test_init_default_client_info(creds): + client = subscriber.Client(credentials=creds) -def test_init_w_api_endpoint(): - client_options = {"api_endpoint": "testendpoint.google.com"} - client = subscriber.Client(client_options=client_options) + installed_version = subscriber.client.__version__ + expected_client_info = f"gccl/{installed_version}" - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( - "utf-8" - ) == "testendpoint.google.com" + for wrapped_method in client.transport._wrapped_methods.values(): + user_agent = next( + ( + header_value + for header, header_value in wrapped_method._metadata + if header == METRICS_METADATA_KEY + ), + None, # pragma: NO COVER + ) + assert user_agent is not None + assert expected_client_info in user_agent -def test_init_w_unicode_api_endpoint(): - client_options = {"api_endpoint": u"testendpoint.google.com"} - client = subscriber.Client(client_options=client_options) +def test_init_default_closed_state(creds): + client = subscriber.Client(credentials=creds) + assert not client.closed - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( - "utf-8" - ) == "testendpoint.google.com" +def test_init_w_custom_transport(creds): + transport = SubscriberGrpcTransport(credentials=creds) + client = subscriber.Client(transport=transport) + assert client._transport is transport -def test_init_w_empty_client_options(): - client = subscriber.Client(client_options={}) - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( +def test_init_w_api_endpoint(creds): + client_options = {"api_endpoint": "testendpoint.google.com"} + client = subscriber.Client(client_options=client_options, credentials=creds) + + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:///testendpoint.google.com:443" + else: + _EXPECTED_TARGET = "testendpoint.google.com:443" + assert (client._transport.grpc_channel._channel.target()).decode( + "utf-8" + ) == _EXPECTED_TARGET + + +def test_init_w_empty_client_options(creds): + client = subscriber.Client(client_options={}, credentials=creds) + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:///pubsub.googleapis.com:443" + else: + _EXPECTED_TARGET = "pubsub.googleapis.com:443" + assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == subscriber_client.SubscriberClient.SERVICE_ADDRESS + ) == _EXPECTED_TARGET + + +def test_init_client_options_pass_through(): + mock_ssl_creds = grpc.ssl_channel_credentials() + + def init(self, *args, **kwargs): + self.kwargs = kwargs + self._transport = mock.Mock() + self._transport._host = "testendpoint.google.com" + 
self._transport._ssl_channel_credentials = mock_ssl_creds
+
+    with mock.patch.object(subscriber_client.SubscriberClient, "__init__", init):
+        client = subscriber.Client(
+            client_options={
+                "quota_project_id": "42",
+                "scopes": [],
+                "credentials_file": "file.json",
+            }
+        )
+    client_options = client.kwargs["client_options"]
+    assert client_options.get("quota_project_id") == "42"
+    assert client_options.get("scopes") == []
+    assert client_options.get("credentials_file") == "file.json"
+    assert client.target == "testendpoint.google.com"
+    assert client.transport._ssl_channel_credentials == mock_ssl_creds
 
 
 def test_init_emulator(monkeypatch):
-    monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon/")
+    monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon:123")
     # NOTE: When the emulator host is set, a custom channel will be used, so
     # no credentials (mock or otherwise) can be passed in.
     client = subscriber.Client()
@@ -73,8 +141,14 @@ def test_init_emulator(monkeypatch):
     #
     # Sadly, there seems to be no good way to do this without poking at
     # the private API of gRPC.
-    channel = client.api.transport.pull._channel
-    assert channel.target().decode("utf8") == "/baz/bacon/"
+    channel = get_pull_channel(client)
+    # Behavior to include dns prefix changed in gRPCv1.63
+    grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]]
+    if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63):
+        _EXPECTED_TARGET = "dns:////baz/bacon:123"
+    else:
+        _EXPECTED_TARGET = "/baz/bacon:123"
+    assert channel.target().decode("utf8") == _EXPECTED_TARGET
 
 
 def test_class_method_factory():
@@ -93,14 +167,14 @@
     "StreamingPullManager.open",
     autospec=True,
 )
-def test_subscribe(manager_open):
-    creds = mock.Mock(spec=credentials.Credentials)
+def test_subscribe(manager_open, creds):
     client = subscriber.Client(credentials=creds)
 
     future = client.subscribe("sub_name_a", callback=mock.sentinel.callback)
     assert isinstance(future, futures.StreamingPullFuture)
 
-    assert future._manager._subscription == "sub_name_a"
+    manager = future._StreamingPullFuture__manager
+    assert manager._subscription == "sub_name_a"
     manager_open.assert_called_once_with(
         mock.ANY,
         callback=mock.sentinel.callback,
@@ -113,8 +187,7 @@
     "StreamingPullManager.open",
     autospec=True,
 )
-def test_subscribe_options(manager_open):
-    creds = mock.Mock(spec=credentials.Credentials)
+def test_subscribe_options(manager_open, creds):
     client = subscriber.Client(credentials=creds)
     flow_control = types.FlowControl(max_bytes=42)
     scheduler = mock.sentinel.scheduler
@@ -124,12 +197,15 @@
         callback=mock.sentinel.callback,
         flow_control=flow_control,
         scheduler=scheduler,
+        await_callbacks_on_shutdown=mock.sentinel.await_callbacks,
     )
 
     assert isinstance(future, futures.StreamingPullFuture)
 
-    assert future._manager._subscription == "sub_name_a"
-    assert future._manager.flow_control == flow_control
-    assert future._manager._scheduler == scheduler
+    manager = future._StreamingPullFuture__manager
+    assert manager._subscription == "sub_name_a"
+    assert manager.flow_control == flow_control
+    assert manager._scheduler == scheduler
+    assert manager._await_callbacks_on_shutdown is mock.sentinel.await_callbacks
     manager_open.assert_called_once_with(
         mock.ANY,
         callback=mock.sentinel.callback,
@@ -137,32 +213,168 @@
     )
 
 
-def test_close():
-    mock_transport = mock.NonCallableMock()
-    client = subscriber.Client(transport=mock_transport)
+def test_close(creds):
+    client = subscriber.Client(credentials=creds)
+    patcher = mock.patch.object(client._transport.grpc_channel, "close")
 
-    client.close()
+    with patcher as patched_close:
+        client.close()
 
-    mock_transport.channel.close.assert_called()
+    patched_close.assert_called()
+    assert client.closed
 
 
-def test_closes_channel_as_context_manager():
-    mock_transport = mock.NonCallableMock()
-    client = subscriber.Client(transport=mock_transport)
+def test_closes_channel_as_context_manager(creds):
+    client = subscriber.Client(credentials=creds)
+    patcher = mock.patch.object(client._transport.grpc_channel, "close")
 
-    with client:
-        pass
+    with patcher as patched_close:
+        with client:
+            pass
 
-    mock_transport.channel.close.assert_called()
+    patched_close.assert_called()
 
 
-def test_streaming_pull_gapic_monkeypatch():
-    transport = mock.NonCallableMock(spec=["streaming_pull"])
-    transport.streaming_pull = mock.Mock(spec=[])
-    client = subscriber.Client(transport=transport)
+def test_context_manager_raises_if_closed(creds):
+    client = subscriber.Client(credentials=creds)
+
+    with mock.patch.object(client._transport.grpc_channel, "close"):
+        client.close()
+
+    expected_msg = r"(?i).*closed.*cannot.*context manager.*"
+    with pytest.raises(RuntimeError, match=expected_msg):
+        with client:
+            pass  # pragma: NO COVER
+
+
+def test_api_property_deprecated(creds):
+    client = subscriber.Client(credentials=creds)
+
+    with pytest.warns(DeprecationWarning, match="client.api") as warned:
+        client.api
 
-    client.streaming_pull(requests=iter([]))
+    assert len(warned) == 1
+    assert issubclass(warned[0].category, DeprecationWarning)
+    warning_msg = str(warned[0].message)
+    assert "client.api" in warning_msg
 
-    assert client.api.transport is transport
+
+def test_api_property_proxy_to_generated_client(creds):
+    client = subscriber.Client(credentials=creds)
+
+    with pytest.warns(DeprecationWarning, match="client.api"):
+        api_object = client.api
+
+    # Not a perfect check, but we are satisfied if the returned API object indeed
+    # contains all methods of the generated class.
+    superclass_attrs = (attr for attr in dir(type(client).__mro__[1]))
+    assert all(
+        hasattr(api_object, attr)
+        for attr in superclass_attrs
+        if callable(getattr(client, attr))
+    )
+
+    # The close() method only exists on the hand-written wrapper class.
+    assert hasattr(client, "close")
+    assert not hasattr(api_object, "close")
+
+
+def test_streaming_pull_gapic_monkeypatch(creds):
+    client = subscriber.Client(credentials=creds)
+
+    with mock.patch("google.api_core.gapic_v1.method.wrap_method"):
+        client.streaming_pull(requests=iter([]))
+
+    transport = client._transport
     assert hasattr(transport.streaming_pull, "_prefetch_first_result_")
     assert not transport.streaming_pull._prefetch_first_result_
+
+
+def test_sync_pull_warning_if_return_immediately(creds):
+    client = subscriber.Client(credentials=creds)
+    subscription_path = "projects/foo/subscriptions/bar"
+
+    with mock.patch.object(client._transport, "_wrapped_methods"), pytest.warns(
+        DeprecationWarning,
+        match="The return_immediately flag is deprecated and should be set to False",
+    ) as warned:
+        client.pull(subscription=subscription_path, return_immediately=True)
+
+    # Setting the deprecated return_immediately flag to True should emit a warning.
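+    # The transport's `_wrapped_methods` mapping is mocked above, so pull() does
+    # not issue a real RPC here; only the emitted DeprecationWarning is under test.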
+ assert len(warned) == 1 + assert issubclass(warned[0].category, DeprecationWarning) + warning_msg = str(warned[0].message) + assert "return_immediately" in warning_msg + assert "deprecated" in warning_msg + + +@pytest.mark.asyncio +async def test_sync_pull_warning_if_return_immediately_async(creds): + from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient + + client = SubscriberAsyncClient(credentials=creds) + subscription_path = "projects/foo/subscriptions/bar" + + patcher = mock.patch.object( + type(client.transport.pull), + "__call__", + new_callable=mock.AsyncMock, + ) + + with patcher, pytest.warns( + DeprecationWarning, + match="The return_immediately flag is deprecated and should be set to False", + ) as warned: + await client.pull(subscription=subscription_path, return_immediately=True) + + # Setting the deprecated return_immediately flag to True should emit a warning. + assert len(warned) == 1 + assert issubclass(warned[0].category, DeprecationWarning) + warning_msg = str(warned[0].message) + assert "return_immediately" in warning_msg + assert "deprecated" in warning_msg + + +@pytest.mark.parametrize( + "enable_open_telemetry", + [ + True, + False, + ], +) +def test_opentelemetry_subscriber_setting(creds, enable_open_telemetry): + options = types.SubscriberOptions( + enable_open_telemetry_tracing=enable_open_telemetry, + ) + if sys.version_info >= (3, 8) or enable_open_telemetry is False: + client = subscriber.Client(credentials=creds, subscriber_options=options) + assert client.subscriber_options == options + assert client._open_telemetry_enabled == enable_open_telemetry + else: + with pytest.warns( + RuntimeWarning, + match="Open Telemetry for Python version 3.7 or lower is not supported. Disabling Open Telemetry tracing.", + ): + client = subscriber.Client(credentials=creds, subscriber_options=options) + assert client._open_telemetry_enabled is False + + +def test_opentelemetry_propagator_get(): + message = PubsubMessage(data=b"foo") + message.attributes["key1"] = "value1" + message.attributes["googclient_key2"] = "value2" + + assert OpenTelemetryContextGetter().get(message, "key2") == ["value2"] + + assert OpenTelemetryContextGetter().get(message, "key1") is None + + +def test_opentelemetry_propagator_keys(): + message = PubsubMessage(data=b"foo") + message.attributes["key1"] = "value1" + message.attributes["googclient_key2"] = "value2" + + assert sorted(OpenTelemetryContextGetter().keys(message)) == [ + "googclient_key2", + "key1", + ] diff --git a/tests/unit/pubsub_v1/test__gapic.py b/tests/unit/pubsub_v1/test__gapic.py deleted file mode 100644 index 5478aee18..000000000 --- a/tests/unit/pubsub_v1/test__gapic.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from google.cloud.pubsub_v1 import _gapic - - -class SourceClass(object): - def __init__(self): - self.x = "x" - - def method(self): - return "source class instance method" - - @staticmethod - def static_method(): - return "source class static method" - - @classmethod - def class_method(cls): - return "source class class method" - - @classmethod - def blacklisted_method(cls): - return "source class blacklisted method" - - -def test_add_method(): - @_gapic.add_methods(SourceClass, ("blacklisted_method",)) - class Foo(object): - def __init__(self): - self.api = SourceClass() - - def method(self): - return "foo class instance method" - - foo = Foo() - - # Any method that's callable and not blacklisted is "inherited". - assert set(["method", "static_method", "class_method"]) <= set(dir(foo)) - assert "blacklisted_method" not in dir(foo) - - # Source Class's static and class methods become static methods. - assert type(Foo.__dict__["static_method"]) == staticmethod - assert foo.static_method() == "source class static method" - assert type(Foo.__dict__["class_method"]) == staticmethod - assert foo.class_method() == "source class class method" - - # The decorator changes the behavior of instance methods of the wrapped class. - # method() is called on an instance of the Source Class (stored as an - # attribute on the wrapped class). - assert foo.method() == "source class instance method" diff --git a/tests/unit/pubsub_v1/test_futures.py b/tests/unit/pubsub_v1/test_futures.py index 11349d5d4..5a4dad41a 100644 --- a/tests/unit/pubsub_v1/test_futures.py +++ b/tests/unit/pubsub_v1/test_futures.py @@ -12,9 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import concurrent.futures +import sys import threading +import time + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock -import mock import pytest from google.cloud.pubsub_v1 import exceptions @@ -25,36 +33,6 @@ def _future(*args, **kwargs): return futures.Future(*args, **kwargs) -def test_constructor_defaults(): - with mock.patch.object(threading, "Event", autospec=True) as Event: - future = _future() - - assert future._result == futures.Future._SENTINEL - assert future._exception == futures.Future._SENTINEL - assert future._callbacks == [] - assert future._completed is Event.return_value - - Event.assert_called_once_with() - - -def test_constructor_explicit_completed(): - completed = mock.sentinel.completed - future = _future(completed=completed) - - assert future._result == futures.Future._SENTINEL - assert future._exception == futures.Future._SENTINEL - assert future._callbacks == [] - assert future._completed is completed - - -def test_cancel(): - assert _future().cancel() is False - - -def test_cancelled(): - assert _future().cancelled() is False - - def test_running(): future = _future() assert future.running() is True @@ -112,8 +90,8 @@ def test_add_done_callback_pending_batch(): future = _future() callback = mock.Mock() future.add_done_callback(callback) - assert len(future._callbacks) == 1 - assert callback in future._callbacks + assert len(future._done_callbacks) == 1 + assert callback in future._done_callbacks assert callback.call_count == 0 @@ -134,15 +112,76 @@ def test_trigger(): callback.assert_called_once_with(future) +def test_set_running_or_notify_cancel_not_implemented_error(): + future = _future() + with pytest.raises(NotImplementedError) as exc_info: + 
future.set_running_or_notify_cancel()
+
+    assert exc_info.value.args
+    error_msg = exc_info.value.args[0]
+    assert "used by executors" in error_msg
+    assert "concurrent.futures" in error_msg
+
+
+@pytest.mark.skipif(
+    sys.version_info < (3, 8),
+    reason="InvalidStateError is only available in Python 3.8+",
+)
 def test_set_result_once_only():
     future = _future()
     future.set_result("12345")
-    with pytest.raises(RuntimeError):
+    with pytest.raises(concurrent.futures.InvalidStateError):
         future.set_result("67890")
 
 
+@pytest.mark.skipif(
+    sys.version_info < (3, 8),
+    reason="InvalidStateError is only available in Python 3.8+",
+)
 def test_set_exception_once_only():
     future = _future()
     future.set_exception(ValueError("wah wah"))
-    with pytest.raises(RuntimeError):
+    with pytest.raises(concurrent.futures.InvalidStateError):
         future.set_exception(TypeError("other wah wah"))
+
+
+def test_as_completed_compatibility():
+    all_futures = {i: _future() for i in range(6)}
+    done_futures = []
+
+    def resolve_future(future_idx, delay=0):
+        time.sleep(delay)
+        future = all_futures[future_idx]
+        if future_idx % 2 == 0:
+            future.set_result(f"{future_idx}: I'm done!")
+        else:
+            future.set_exception(Exception(f"Future {future_idx} errored"))
+
+    all_futures[2].set_result("2: I'm done!")
+
+    # Start marking the futures as completed (either with success or error) at
+    # different times and check that the "as completed" order is correct.
+    for future_idx, delay in ((0, 0.8), (3, 0.6), (1, 0.4), (5, 0.2)):
+        threading.Thread(
+            target=resolve_future, args=(future_idx, delay), daemon=True
+        ).start()
+
+    try:
+        # Use a loop instead of a list comprehension to gather futures completed
+        # before the timeout error occurs.
+        for future in concurrent.futures.as_completed(all_futures.values(), timeout=1):
+            done_futures.append(future)
+    except concurrent.futures.TimeoutError:
+        pass
+    else:  # pragma: NO COVER
+        pytest.fail("Not all Futures should have been recognized as completed.")
+
+    # NOTE: Future 4 was never resolved.
+    expected = [
+        all_futures[2],
+        all_futures[5],
+        all_futures[1],
+        all_futures[3],
+        all_futures[0],
+    ]
+    assert done_futures == expected
diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py
new file mode 100644
index 000000000..6dc70e3d1
--- /dev/null
+++ b/tests/unit/test_packaging.py
@@ -0,0 +1,37 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+
+
+def test_namespace_package_compat(tmp_path):
+    # The ``google`` namespace package should not be masked
+    # by the presence of ``google-cloud-pubsub``.
+    google = tmp_path / "google"
+    google.mkdir()
+    google.joinpath("othermod.py").write_text("")
+    env = dict(os.environ, PYTHONPATH=str(tmp_path))
+    cmd = [sys.executable, "-m", "google.othermod"]
+    subprocess.check_call(cmd, env=env)
+
+    # The ``google.cloud`` namespace package should not be masked
+    # by the presence of ``google-cloud-pubsub``.
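+    # As with the ``google`` check above, this relies on namespace-package
+    # behavior: a module placed on PYTHONPATH should remain importable
+    # alongside the installed ``google-cloud-pubsub`` packages.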
+ google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.cloud.othermod"] + subprocess.check_call(cmd, env=env)