From b64cf1222692dea6f4dac155de3db2bf1af58732 Mon Sep 17 00:00:00 2001 From: kgala2 Date: Mon, 24 Feb 2025 22:19:29 +0000 Subject: [PATCH 1/8] chore: audit test csql instances (#1238) --- .github/workflows/tests.yml | 4 ---- CONTRIBUTING.md | 8 ++------ tests/system/test_asyncpg_iam_auth.py | 4 ++-- tests/system/test_pg8000_iam_auth.py | 4 ++-- tests/system/test_pymysql_iam_auth.py | 4 ++-- 5 files changed, 8 insertions(+), 16 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4218e23b7..e710138f6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -68,13 +68,11 @@ jobs: with: secrets: |- MYSQL_CONNECTION_NAME:${{ vars.GOOGLE_CLOUD_PROJECT }}/MYSQL_CONNECTION_NAME - MYSQL_IAM_CONNECTION_NAME:${{ vars.GOOGLE_CLOUD_PROJECT }}/MYSQL_IAM_CONNECTION_NAME MYSQL_USER:${{ vars.GOOGLE_CLOUD_PROJECT }}/MYSQL_USER MYSQL_IAM_USER:${{ vars.GOOGLE_CLOUD_PROJECT }}/MYSQL_USER_IAM_PYTHON MYSQL_PASS:${{ vars.GOOGLE_CLOUD_PROJECT }}/MYSQL_PASS MYSQL_DB:${{ vars.GOOGLE_CLOUD_PROJECT }}/MYSQL_DB POSTGRES_CONNECTION_NAME:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_CONNECTION_NAME - POSTGRES_IAM_CONNECTION_NAME:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_IAM_CONNECTION_NAME POSTGRES_USER:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_USER POSTGRES_IAM_USER:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_USER_IAM_PYTHON POSTGRES_PASS:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_PASS @@ -91,13 +89,11 @@ jobs: - name: Run tests env: MYSQL_CONNECTION_NAME: "${{ steps.secrets.outputs.MYSQL_CONNECTION_NAME }}" - MYSQL_IAM_CONNECTION_NAME: "${{ steps.secrets.outputs.MYSQL_IAM_CONNECTION_NAME }}" MYSQL_USER: "${{ steps.secrets.outputs.MYSQL_USER }}" MYSQL_IAM_USER: "${{ steps.secrets.outputs.MYSQL_IAM_USER }}" MYSQL_PASS: "${{ steps.secrets.outputs.MYSQL_PASS }}" MYSQL_DB: "${{ steps.secrets.outputs.MYSQL_DB }}" POSTGRES_CONNECTION_NAME: "${{ steps.secrets.outputs.POSTGRES_CONNECTION_NAME }}" - POSTGRES_IAM_CONNECTION_NAME: "${{ steps.secrets.outputs.POSTGRES_IAM_CONNECTION_NAME }}" POSTGRES_USER: "${{ steps.secrets.outputs.POSTGRES_USER }}" POSTGRES_IAM_USER: "${{ steps.secrets.outputs.POSTGRES_IAM_USER }}" POSTGRES_PASS: "${{ steps.secrets.outputs.POSTGRES_PASS }}" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index df5533c87..43ece8953 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -57,23 +57,19 @@ To run all integration tests against real Cloud SQL instances 1. 
Once created, set the following environment variables:

```sh
+# MySQL instance with IAM authentication enabled
export MYSQL_CONNECTION_NAME="my-project:my-region:my-instance"
export MYSQL_USER="db_user"
export MYSQL_PASS="db_pass"
export MYSQL_DB="db_name"
-# MySQL instance with IAM authentication enabled
-# (can be same as MYSQL_CONNECTION_NAME)
-export MYSQL_IAM_CONNECTION_NAME="my-project:my-region:my-instance"
# IAM Principal of ADC sourced credentials (truncated)
export MYSQL_IAM_USER="test-user@gmail.com"

+# Postgres instance with IAM authentication enabled
export POSTGRES_CONNECTION_NAME="my-project:my-region:my-instance"
export POSTGRES_USER="db_user"
export POSTGRES_PASS="db_pass"
export POSTGRES_DB="db_name"
-# Postgres instance with IAM authentication enabled
-# (can be same as POSTGRES_CONNECTION_NAME)
-export POSTGRES_IAM_CONNECTION_NAME="my-project:my-region:my-instance"
# IAM Principal of ADC sourced credentials
export POSTGRES_IAM_USER="test-user@gmail.com"
```

diff --git a/tests/system/test_asyncpg_iam_auth.py b/tests/system/test_asyncpg_iam_auth.py
index 103efd1af..6e96d96bd 100644
--- a/tests/system/test_asyncpg_iam_auth.py
+++ b/tests/system/test_asyncpg_iam_auth.py
@@ -86,7 +86,7 @@ async def getconn() -> asyncpg.Connection:

 async def test_iam_authn_connection_with_asyncpg() -> None:
     """Basic test to get time from database."""
-    inst_conn_name = os.environ["POSTGRES_IAM_CONNECTION_NAME"]
+    inst_conn_name = os.environ["POSTGRES_CONNECTION_NAME"]
     user = os.environ["POSTGRES_IAM_USER"]
     db = os.environ["POSTGRES_DB"]

@@ -101,7 +101,7 @@ async def test_iam_authn_connection_with_asyncpg() -> None:

 async def test_lazy_iam_authn_connection_with_asyncpg() -> None:
     """Basic test to get time from database."""
-    inst_conn_name = os.environ["POSTGRES_IAM_CONNECTION_NAME"]
+    inst_conn_name = os.environ["POSTGRES_CONNECTION_NAME"]
     user = os.environ["POSTGRES_IAM_USER"]
     db = os.environ["POSTGRES_DB"]

diff --git a/tests/system/test_pg8000_iam_auth.py b/tests/system/test_pg8000_iam_auth.py
index 60d2974f0..9a8607bcb 100644
--- a/tests/system/test_pg8000_iam_auth.py
+++ b/tests/system/test_pg8000_iam_auth.py
@@ -84,7 +84,7 @@ def getconn() -> pg8000.dbapi.Connection:

 def test_pg8000_iam_authn_connection() -> None:
     """Basic test to get time from database."""
-    inst_conn_name = os.environ["POSTGRES_IAM_CONNECTION_NAME"]
+    inst_conn_name = os.environ["POSTGRES_CONNECTION_NAME"]
     user = os.environ["POSTGRES_IAM_USER"]
     db = os.environ["POSTGRES_DB"]

@@ -99,7 +99,7 @@ def test_pg8000_iam_authn_connection() -> None:

 def test_lazy_pg8000_iam_authn_connection() -> None:
     """Basic test to get time from database."""
-    inst_conn_name = os.environ["POSTGRES_IAM_CONNECTION_NAME"]
+    inst_conn_name = os.environ["POSTGRES_CONNECTION_NAME"]
     user = os.environ["POSTGRES_IAM_USER"]
     db = os.environ["POSTGRES_DB"]

diff --git a/tests/system/test_pymysql_iam_auth.py b/tests/system/test_pymysql_iam_auth.py
index 80a10a134..9a617b6f7 100644
--- a/tests/system/test_pymysql_iam_auth.py
+++ b/tests/system/test_pymysql_iam_auth.py
@@ -84,7 +84,7 @@ def getconn() -> pymysql.Connection:

 def test_pymysql_iam_authn_connection() -> None:
     """Basic test to get time from database."""
-    inst_conn_name = os.environ["MYSQL_IAM_CONNECTION_NAME"]
+    inst_conn_name = os.environ["MYSQL_CONNECTION_NAME"]
     user = os.environ["MYSQL_IAM_USER"]
     db = os.environ["MYSQL_DB"]

@@ -99,7 +99,7 @@ def test_pymysql_iam_authn_connection() -> None:

 def test_lazy_pymysql_iam_authn_connection() -> None:
     """Basic test to get time from database."""
-    inst_conn_name = os.environ["MYSQL_IAM_CONNECTION_NAME"]
+    inst_conn_name = os.environ["MYSQL_CONNECTION_NAME"]
     user =
os.environ["MYSQL_IAM_USER"] db = os.environ["MYSQL_DB"] From fb8c21c89af524bdd00c51eeb0bcb4a6bf4dbbd3 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Tue, 25 Feb 2025 12:54:52 -0500 Subject: [PATCH 2/8] build: remove .kokoro folder (#1237) --- .github/renovate.json5 | 1 - .kokoro/populate-secrets.sh | 43 --- .kokoro/release.sh | 29 -- .kokoro/release/common.cfg | 52 ---- .kokoro/release/release.cfg | 9 - .kokoro/requirements.in | 13 - .kokoro/requirements.txt | 527 ------------------------------------ .kokoro/trampoline.sh | 26 -- .kokoro/trampoline_v2.sh | 490 --------------------------------- 9 files changed, 1190 deletions(-) delete mode 100755 .kokoro/populate-secrets.sh delete mode 100755 .kokoro/release.sh delete mode 100644 .kokoro/release/common.cfg delete mode 100644 .kokoro/release/release.cfg delete mode 100644 .kokoro/requirements.in delete mode 100644 .kokoro/requirements.txt delete mode 100644 .kokoro/trampoline.sh delete mode 100644 .kokoro/trampoline_v2.sh diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 03218ee0f..6cae0a4d6 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -7,7 +7,6 @@ ":separateMajorReleases", // https://docs.renovatebot.com/presets-default/#separatemajorreleases ":prConcurrentLimitNone", // View complete backlog as PRs. https://docs.renovatebot.com/presets-default/#prconcurrentlimitnone ], - "ignorePaths": [".kokoro/requirements.txt"], "rebaseWhen": "behind-base-branch", "dependencyDashboard": true, "dependencyDashboardLabels": ["type: process"], diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh deleted file mode 100755 index 529bb64dd..000000000 --- a/.kokoro/populate-secrets.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright 2021 Google LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} -function msg { println "$*" >&2 ;} -function println { printf '%s\n' "$(now) $*" ;} - - -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" -msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" -mkdir -p ${SECRET_LOCATION} -for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") -do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret ${key} > \ - "${SECRET_LOCATION}/${key}" - if [[ $? 
== 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi -done \ No newline at end of file diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index cfe9c6076..000000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/cloud-sql-python-connector/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") -cd github/cloud-sql-python-connector -python3 -m build --wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg deleted file mode 100644 index 7e1c01d71..000000000 --- a/.kokoro/release/common.cfg +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "cloud-sql-python-connector/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/cloud-sql-python-connector/.kokoro/release.sh" -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googlecloudplatform-installation,releasetool-publish-reporter-pem" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-3" - } - } -} diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index d42503c27..000000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1,9 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Save artifacts for EO 14028 -action { - define_artifacts { - regex: "github/cloud-sql-python-connector/**/*.tar.gz" - strip_prefix: "github/cloud-sql-python-connector" - } -} diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in deleted file mode 100644 index c94eeeac2..000000000 --- a/.kokoro/requirements.in +++ /dev/null @@ -1,13 +0,0 @@ -gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -build -tomli -secretstorage diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt deleted file mode 100644 index af62d0dfc..000000000 --- a/.kokoro/requirements.txt +++ /dev/null @@ -1,527 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.11 -# by the following command: -# -# pip-compile --generate-hashes requirements.in -# -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c - # via nox -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 - # via gcp-releasetool -build==1.1.1 \ - --hash=sha256:8ed0851ee76e6e38adce47e4bee3b51c771d86c64cf578d0c2245567ee200e73 \ - --hash=sha256:8eea65bb45b1aac2e734ba2cc8dad3a6d97d97901a395bd0ed3e7b46953d2a31 - # via -r requirements.in -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 - # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 - # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ 
- --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - 
--hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - --hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 - # via - # gcp-releasetool - # secretstorage -distlib==0.3.8 \ - 
--hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 - # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b - # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f - # via -r requirements.in -google-api-core==2.17.1 \ - --hash=sha256:610c5b90092c360736baccf17bd3efbcb30dd380e7a6dc28a71059edb8bd0d8e \ - --hash=sha256:9df18a1f87ee0df0bc4eea2770ebc4228392d8cc4066655b320e2cfccb15db95 - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.28.2 \ - --hash=sha256:80b8b4969aa9ed5938c7828308f20f035bc79f9d8fb8120bf9dc8db20b41ba30 \ - --hash=sha256:9fd67bbcd40f16d9d42f950228e9cf02a2ded4ae49198b27432d0cded5a74c38 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.15.0 \ - --hash=sha256:5d9237f88b648e1d724a0f20b5cde65996a37fe51d75d17660b1404097327dd2 \ - --hash=sha256:7560a3c48a03d66c553dc55215d35883c680fe0ab44c23aa4832800ccc855c74 - # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - 
--hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - 
--hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.0 \ - --hash=sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b \ - --hash=sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08 - # via google-cloud-storage -googleapis-common-protos==1.63.0 \ - --hash=sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e \ - --hash=sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632 - # via google-api-core -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 - # via requests -importlib-metadata==6.11.0 \ - --hash=sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443 \ - --hash=sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.3.1 \ - --hash=sha256:86b534de565381f6b3c1c830d13f931d7be1a75f0081c57dff615578676e2206 \ - --hash=sha256:cb28a5ebda8bc47d8c8015307d93163464f9f2b91ab4006e09ff0ce07e8bfb30 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.5 \ - --hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \ - --hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb - # via gcp-releasetool -keyring==24.3.1 \ - --hash=sha256:c3327b6ffafc0e8befbdb597cacdb4928ffe5c1212f7645f186e6d9957a898db \ - --hash=sha256:df38a4d7419a6a60fea5cef1e45a948a3e8430dd12ad88b0f423c5c143906218 - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - 
--hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - 
--hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.2.0 \ - --hash=sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684 \ - --hash=sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1 - # via jaraco-classes -nh3==0.2.15 \ - --hash=sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770 \ - --hash=sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf \ - --hash=sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305 \ - --hash=sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601 \ - --hash=sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28 \ - --hash=sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7 \ - --hash=sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3 \ - --hash=sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911 \ - --hash=sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf \ - --hash=sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0 \ - --hash=sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5 \ - --hash=sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97 \ - --hash=sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d \ - --hash=sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e \ - --hash=sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3 \ - --hash=sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6 - # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f - # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 - # via - # build - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e - # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - 
--hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos -pyasn1==0.5.1 \ - --hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \ - --hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d - # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 - # via cffi -pygments==2.17.2 \ - --hash=sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c \ - --hash=sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367 - # via - # readme-renderer - # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 - # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 - # via gcp-releasetool -pyproject-hooks==1.0.0 \ - --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ - --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 - # via build -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 - # via twine -requests==2.32.0 \ - --hash=sha256:f2c3881dddb70d056c5bd7600a4fae312b2a300e39be6a118d30b90bd27262b5 \ - --hash=sha256:fa5490319474c82ef1d2c9bc459d3652e3ae4ef4c4ebdd18a21145a47ca4b6b8 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 - # via twine -rsa==4.9 \ - 
--hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via - # -r requirements.in - # keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f - # via -r requirements.in -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 - # via -r requirements.in -typing-extensions==4.10.0 \ - --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \ - --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 - # via - # requests - # twine -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 - # via nox -wheel==0.43.0 \ - --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 - # via -r requirements.in -zipp==3.19.1 \ - --hash=sha256:2828e64edb5386ea6a52e7ba7cdb17bb30a73a858f5eb6eb93d8d36f5ea26091 \ - --hash=sha256:35427f6d5594f4acf82d25541438348c26736fa9b3afa2754bcd63cdb99d8e8f - # via importlib-metadata - -# WARNING: The following packages were not pinned, but pip requires them to be -# pinned when the requirements file includes hashes and the requirement is not -# satisfied by a package already installed. Consider using the --allow-unsafe flag. -# setuptools \ No newline at end of file diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh deleted file mode 100644 index bf9cb5a1b..000000000 --- a/.kokoro/trampoline.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Always run the cleanup script, regardless of the success of bouncing into -# the container. -function cleanup() { - chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - echo "cleanup"; -} -trap cleanup EXIT - -$(dirname $0)/populate-secrets.sh # Secret Manager secrets. 
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh deleted file mode 100644 index 5b87e2b1e..000000000 --- a/.kokoro/trampoline_v2.sh +++ /dev/null @@ -1,490 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# trampoline_v2.sh -# -# If you want to make a change to this file, consider doing so at: -# https://github.com/googlecloudplatform/docker-ci-helpers -# -# This script is for running CI builds. For Kokoro builds, we -# set this script to `build_file` field in the Kokoro configuration. - -# This script does 3 things. -# -# 1. Prepare the Docker image for the test -# 2. Run the Docker with appropriate flags to run the test -# 3. Upload the newly built Docker image -# -# in a way that is somewhat compatible with trampoline_v1. -# -# These environment variables are required: -# TRAMPOLINE_IMAGE: The docker image to use. -# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. -# -# You can optionally change these environment variables: -# TRAMPOLINE_IMAGE_UPLOAD: -# (true|false): Whether to upload the Docker image after the -# successful builds. -# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. -# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. -# Defaults to /workspace. -# Potentially there are some repo specific envvars in .trampolinerc in -# the project root. -# -# Here is an example for running this script. -# TRAMPOLINE_IMAGE=gcr.io/cloud-devrel-kokoro-resources/node:10-user \ -# TRAMPOLINE_BUILD_FILE=.kokoro/system-test.sh \ -# .kokoro/trampoline_v2.sh - -set -euo pipefail - -TRAMPOLINE_VERSION="2.0.7" - -if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then - readonly IO_COLOR_RED="$(tput setaf 1)" - readonly IO_COLOR_GREEN="$(tput setaf 2)" - readonly IO_COLOR_YELLOW="$(tput setaf 3)" - readonly IO_COLOR_RESET="$(tput sgr0)" -else - readonly IO_COLOR_RED="" - readonly IO_COLOR_GREEN="" - readonly IO_COLOR_YELLOW="" - readonly IO_COLOR_RESET="" -fi - -function function_exists { - [ $(LC_ALL=C type -t $1)"" == "function" ] -} - -# Logs a message using the given color. The first argument must be one -# of the IO_COLOR_* variables defined above, such as -# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the -# given color. The log message will also have an RFC-3339 timestamp -# prepended (in UTC). You can disable the color output by setting -# TERM=vt100. -function log_impl() { - local color="$1" - shift - local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" - echo "================================================================" - echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" - echo "================================================================" -} - -# Logs the given message with normal coloring and a timestamp. -function log() { - log_impl "${IO_COLOR_RESET}" "$@" -} - -# Logs the given message in green with a timestamp. 
-function log_green() { - log_impl "${IO_COLOR_GREEN}" "$@" -} - -# Logs the given message in yellow with a timestamp. -function log_yellow() { - log_impl "${IO_COLOR_YELLOW}" "$@" -} - -# Logs the given message in red with a timestamp. -function log_red() { - log_impl "${IO_COLOR_RED}" "$@" -} - -readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) -readonly tmphome="${tmpdir}/h" -mkdir -p "${tmphome}" - -function cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -RUNNING_IN_CI="${RUNNING_IN_CI:-false}" - -# The workspace in the container, defaults to /workspace. -TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" - -pass_down_envvars=( - # TRAMPOLINE_V2 variables. - # Tells scripts whether they are running as part of CI or not. - "RUNNING_IN_CI" - # Indicates which CI system we're in. - "TRAMPOLINE_CI" - # Indicates the version of the script. - "TRAMPOLINE_VERSION" - # Contains path to build artifacts being executed. - "KOKORO_BUILD_ARTIFACTS_SUBDIR" -) - -log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" - -# Detect which CI systems we're in. If we're in any of the CI systems -# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be -# the name of the CI system. Both envvars will be passing down to the -# container for telling which CI system we're in. -if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then - # descriptive env var for indicating it's on CI. - RUNNING_IN_CI="true" - TRAMPOLINE_CI="kokoro" - if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then - if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then - log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." - exit 1 - fi - # This service account will be activated later. - TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" - else - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - gcloud auth list - fi - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet - fi - pass_down_envvars+=( - # KOKORO dynamic variables. 
- "KOKORO_BUILD_NUMBER" - "KOKORO_BUILD_ID" - "KOKORO_JOB_NAME" - "KOKORO_GIT_COMMIT" - "KOKORO_GITHUB_COMMIT" - "KOKORO_GITHUB_PULL_REQUEST_NUMBER" - "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For flakybot - "KOKORO_GITHUB_COMMIT_URL" - "KOKORO_GITHUB_PULL_REQUEST_URL" - ) -elif [[ "${TRAVIS:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="travis" - pass_down_envvars+=( - "TRAVIS_BRANCH" - "TRAVIS_BUILD_ID" - "TRAVIS_BUILD_NUMBER" - "TRAVIS_BUILD_WEB_URL" - "TRAVIS_COMMIT" - "TRAVIS_COMMIT_MESSAGE" - "TRAVIS_COMMIT_RANGE" - "TRAVIS_JOB_NAME" - "TRAVIS_JOB_NUMBER" - "TRAVIS_JOB_WEB_URL" - "TRAVIS_PULL_REQUEST" - "TRAVIS_PULL_REQUEST_BRANCH" - "TRAVIS_PULL_REQUEST_SHA" - "TRAVIS_PULL_REQUEST_SLUG" - "TRAVIS_REPO_SLUG" - "TRAVIS_SECURE_ENV_VARS" - "TRAVIS_TAG" - ) -elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="github-workflow" - pass_down_envvars+=( - "GITHUB_WORKFLOW" - "GITHUB_RUN_ID" - "GITHUB_RUN_NUMBER" - "GITHUB_ACTION" - "GITHUB_ACTIONS" - "GITHUB_ACTOR" - "GITHUB_REPOSITORY" - "GITHUB_EVENT_NAME" - "GITHUB_EVENT_PATH" - "GITHUB_SHA" - "GITHUB_REF" - "GITHUB_HEAD_REF" - "GITHUB_BASE_REF" - ) -elif [[ "${CIRCLECI:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="circleci" - pass_down_envvars+=( - "CIRCLE_BRANCH" - "CIRCLE_BUILD_NUM" - "CIRCLE_BUILD_URL" - "CIRCLE_COMPARE_URL" - "CIRCLE_JOB" - "CIRCLE_NODE_INDEX" - "CIRCLE_NODE_TOTAL" - "CIRCLE_PREVIOUS_BUILD_NUM" - "CIRCLE_PROJECT_REPONAME" - "CIRCLE_PROJECT_USERNAME" - "CIRCLE_REPOSITORY_URL" - "CIRCLE_SHA1" - "CIRCLE_STAGE" - "CIRCLE_USERNAME" - "CIRCLE_WORKFLOW_ID" - "CIRCLE_WORKFLOW_JOB_ID" - "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" - "CIRCLE_WORKFLOW_WORKSPACE_ID" - ) -fi - -# Configure the service account for pulling the docker image. -function repo_root() { - local dir="$1" - while [[ ! -d "${dir}/.git" ]]; do - dir="$(dirname "$dir")" - done - echo "${dir}" -} - -# Detect the project root. In CI builds, we assume the script is in -# the git tree and traverse from there, otherwise, traverse from `pwd` -# to find `.git` directory. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - PROGRAM_PATH="$(realpath "$0")" - PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" -else - PROJECT_ROOT="$(repo_root $(pwd))" -fi - -log_yellow "Changing to the project root: ${PROJECT_ROOT}." -cd "${PROJECT_ROOT}" - -# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need -# to use this environment variable in `PROJECT_ROOT`. -if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then - - mkdir -p "${tmpdir}/gcloud" - gcloud_config_dir="${tmpdir}/gcloud" - - log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." - export CLOUDSDK_CONFIG="${gcloud_config_dir}" - - log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." - gcloud auth activate-service-account \ - --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet -fi - -required_envvars=( - # The basic trampoline configurations. - "TRAMPOLINE_IMAGE" - "TRAMPOLINE_BUILD_FILE" -) - -if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then - source "${PROJECT_ROOT}/.trampolinerc" -fi - -log_yellow "Checking environment variables." -for e in "${required_envvars[@]}" -do - if [[ -z "${!e:-}" ]]; then - log "Missing ${e} env var. Aborting." - exit 1 - fi -done - -# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 -# script: e.g. 
"github/repo-name/.kokoro/run_tests.sh" -TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" -log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" - -# ignore error on docker operations and test execution -set +e - -log_yellow "Preparing Docker image." -# We only download the docker image in CI builds. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - # Download the docker image specified by `TRAMPOLINE_IMAGE` - - # We may want to add --max-concurrent-downloads flag. - - log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." - if docker pull "${TRAMPOLINE_IMAGE}"; then - log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="true" - else - log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="false" - fi -else - # For local run, check if we have the image. - if docker images "${TRAMPOLINE_IMAGE}" | grep "${TRAMPOLINE_IMAGE%:*}"; then - has_image="true" - else - has_image="false" - fi -fi - - -# The default user for a Docker container has uid 0 (root). To avoid -# creating root-owned files in the build directory we tell docker to -# use the current user ID. -user_uid="$(id -u)" -user_gid="$(id -g)" -user_name="$(id -un)" - -# To allow docker in docker, we add the user to the docker group in -# the host os. -docker_gid=$(cut -d: -f3 < <(getent group docker)) - -update_cache="false" -if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then - # Build the Docker image from the source. - context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") - docker_build_flags=( - "-f" "${TRAMPOLINE_DOCKERFILE}" - "-t" "${TRAMPOLINE_IMAGE}" - "--build-arg" "UID=${user_uid}" - "--build-arg" "USERNAME=${user_name}" - ) - if [[ "${has_image}" == "true" ]]; then - docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") - fi - - log_yellow "Start building the docker image." - if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then - echo "docker build" "${docker_build_flags[@]}" "${context_dir}" - fi - - # ON CI systems, we want to suppress docker build logs, only - # output the logs when it fails. - if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - if docker build "${docker_build_flags[@]}" "${context_dir}" \ - > "${tmpdir}/docker_build.log" 2>&1; then - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - cat "${tmpdir}/docker_build.log" - fi - - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - log_yellow "Dumping the build logs:" - cat "${tmpdir}/docker_build.log" - exit 1 - fi - else - if docker build "${docker_build_flags[@]}" "${context_dir}"; then - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - exit 1 - fi - fi -else - if [[ "${has_image}" != "true" ]]; then - log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." - exit 1 - fi -fi - -# We use an array for the flags so they are easier to document. -docker_flags=( - # Remove the container after it exists. - "--rm" - - # Use the host network. - "--network=host" - - # Run in priviledged mode. We are not using docker for sandboxing or - # isolation, just for packaging our dev tools. - "--privileged" - - # Run the docker script with the user id. Because the docker image gets to - # write in ${PWD} you typically want this to be your user id. - # To allow docker in docker, we need to use docker gid on the host. - "--user" "${user_uid}:${docker_gid}" - - # Pass down the USER. 
- "--env" "USER=${user_name}" - - # Mount the project directory inside the Docker container. - "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" - "--workdir" "${TRAMPOLINE_WORKSPACE}" - "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" - - # Mount the temporary home directory. - "--volume" "${tmphome}:/h" - "--env" "HOME=/h" - - # Allow docker in docker. - "--volume" "/var/run/docker.sock:/var/run/docker.sock" - - # Mount the /tmp so that docker in docker can mount the files - # there correctly. - "--volume" "/tmp:/tmp" - # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR - # TODO(tmatsuo): This part is not portable. - "--env" "TRAMPOLINE_SECRET_DIR=/secrets" - "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" - "--env" "KOKORO_GFILE_DIR=/secrets/gfile" - "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" - "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" -) - -# Add an option for nicer output if the build gets a tty. -if [[ -t 0 ]]; then - docker_flags+=("-it") -fi - -# Passing down env vars -for e in "${pass_down_envvars[@]}" -do - if [[ -n "${!e:-}" ]]; then - docker_flags+=("--env" "${e}=${!e}") - fi -done - -# If arguments are given, all arguments will become the commands run -# in the container, otherwise run TRAMPOLINE_BUILD_FILE. -if [[ $# -ge 1 ]]; then - log_yellow "Running the given commands '" "${@:1}" "' in the container." - readonly commands=("${@:1}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" -else - log_yellow "Running the tests in a Docker container." - docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" -fi - - -test_retval=$? - -if [[ ${test_retval} -eq 0 ]]; then - log_green "Build finished with ${test_retval}" -else - log_red "Build finished with ${test_retval}" -fi - -# Only upload it when the test passes. -if [[ "${update_cache}" == "true" ]] && \ - [[ $test_retval == 0 ]] && \ - [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then - log_yellow "Uploading the Docker image." - if docker push "${TRAMPOLINE_IMAGE}"; then - log_green "Finished uploading the Docker image." - else - log_red "Failed uploading the Docker image." - fi - # Call trampoline_after_upload_hook if it's defined. 
- if function_exists trampoline_after_upload_hook; then - trampoline_after_upload_hook - fi - -fi - -exit "${test_retval}" From eeac5f731f84a32c982d0d7d7e2fa3a486bba645 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Tue, 18 Mar 2025 12:09:15 -0400 Subject: [PATCH 3/8] test: add new int tests for Connector with domain name (#1244) --- .github/workflows/tests.yml | 2 ++ tests/system/test_asyncpg_connection.py | 32 +++++++++++++++++++++++-- tests/system/test_pg8000_connection.py | 30 ++++++++++++++++++++++- 3 files changed, 61 insertions(+), 3 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e710138f6..b8e6eb58d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -81,6 +81,7 @@ jobs: POSTGRES_CAS_PASS:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_CAS_PASS POSTGRES_CUSTOMER_CAS_CONNECTION_NAME:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_CUSTOMER_CAS_CONNECTION_NAME POSTGRES_CUSTOMER_CAS_PASS:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_CUSTOMER_CAS_PASS + POSTGRES_CUSTOMER_CAS_PASS_VALID_DOMAIN_NAME:${{ vars.GOOGLE_CLOUD_PROJECT }}/POSTGRES_CUSTOMER_CAS_PASS_VALID_DOMAIN_NAME SQLSERVER_CONNECTION_NAME:${{ vars.GOOGLE_CLOUD_PROJECT }}/SQLSERVER_CONNECTION_NAME SQLSERVER_USER:${{ vars.GOOGLE_CLOUD_PROJECT }}/SQLSERVER_USER SQLSERVER_PASS:${{ vars.GOOGLE_CLOUD_PROJECT }}/SQLSERVER_PASS @@ -102,6 +103,7 @@ jobs: POSTGRES_CAS_PASS: "${{ steps.secrets.outputs.POSTGRES_CAS_PASS }}" POSTGRES_CUSTOMER_CAS_CONNECTION_NAME: "${{ steps.secrets.outputs.POSTGRES_CUSTOMER_CAS_CONNECTION_NAME }}" POSTGRES_CUSTOMER_CAS_PASS: "${{ steps.secrets.outputs.POSTGRES_CUSTOMER_CAS_PASS }}" + POSTGRES_CUSTOMER_CAS_PASS_VALID_DOMAIN_NAME: "${{ steps.secrets.outputs.POSTGRES_CUSTOMER_CAS_PASS_VALID_DOMAIN_NAME }}" SQLSERVER_CONNECTION_NAME: "${{ steps.secrets.outputs.SQLSERVER_CONNECTION_NAME }}" SQLSERVER_USER: "${{ steps.secrets.outputs.SQLSERVER_USER }}" SQLSERVER_PASS: "${{ steps.secrets.outputs.SQLSERVER_PASS }}" diff --git a/tests/system/test_asyncpg_connection.py b/tests/system/test_asyncpg_connection.py index 8de14d576..dfcc3941b 100644 --- a/tests/system/test_asyncpg_connection.py +++ b/tests/system/test_asyncpg_connection.py @@ -16,13 +16,15 @@ import asyncio import os -from typing import Any +from typing import Any, Union import asyncpg import sqlalchemy import sqlalchemy.ext.asyncio from google.cloud.sql.connector import Connector +from google.cloud.sql.connector import DefaultResolver +from google.cloud.sql.connector import DnsResolver async def create_sqlalchemy_engine( @@ -31,6 +33,7 @@ async def create_sqlalchemy_engine( password: str, db: str, refresh_strategy: str = "background", + resolver: Union[type[DefaultResolver], type[DnsResolver]] = DefaultResolver, ) -> tuple[sqlalchemy.ext.asyncio.engine.AsyncEngine, Connector]: """Creates a connection pool for a Cloud SQL instance and returns the pool and the connector. Callers are responsible for closing the pool and the @@ -64,9 +67,16 @@ async def create_sqlalchemy_engine( Refresh strategy for the Cloud SQL Connector. Can be one of "lazy" or "background". For serverless environments use "lazy" to avoid errors resulting from CPU being throttled. + resolver (Optional[google.cloud.sql.connector.DefaultResolver]): + Resolver class for resolving instance connection name. Use + google.cloud.sql.connector.DnsResolver when resolving DNS domain + names or google.cloud.sql.connector.DefaultResolver for regular + instance connection names ("my-project:my-region:my-instance"). 
""" loop = asyncio.get_running_loop() - connector = Connector(loop=loop, refresh_strategy=refresh_strategy) + connector = Connector( + loop=loop, refresh_strategy=refresh_strategy, resolver=resolver + ) async def getconn() -> asyncpg.Connection: conn: asyncpg.Connection = await connector.connect_async( @@ -183,6 +193,24 @@ async def test_lazy_sqlalchemy_connection_with_asyncpg() -> None: await connector.close_async() +async def test_custom_SAN_with_dns_sqlalchemy_connection_with_asyncpg() -> None: + """Basic test to get time from database.""" + inst_conn_name = os.environ["POSTGRES_CUSTOMER_CAS_PASS_VALID_DOMAIN_NAME"] + user = os.environ["POSTGRES_USER"] + password = os.environ["POSTGRES_CUSTOMER_CAS_PASS"] + db = os.environ["POSTGRES_DB"] + + pool, connector = await create_sqlalchemy_engine( + inst_conn_name, user, password, db, resolver=DnsResolver + ) + + async with pool.connect() as conn: + res = (await conn.execute(sqlalchemy.text("SELECT 1"))).fetchone() + assert res[0] == 1 + + await connector.close_async() + + async def test_connection_with_asyncpg() -> None: """Basic test to get time from database.""" inst_conn_name = os.environ["POSTGRES_CONNECTION_NAME"] diff --git a/tests/system/test_pg8000_connection.py b/tests/system/test_pg8000_connection.py index b56a8e823..c47b860c9 100644 --- a/tests/system/test_pg8000_connection.py +++ b/tests/system/test_pg8000_connection.py @@ -18,10 +18,14 @@ import os # [START cloud_sql_connector_postgres_pg8000] +from typing import Union + import pg8000 import sqlalchemy from google.cloud.sql.connector import Connector +from google.cloud.sql.connector import DefaultResolver +from google.cloud.sql.connector import DnsResolver def create_sqlalchemy_engine( @@ -30,6 +34,7 @@ def create_sqlalchemy_engine( password: str, db: str, refresh_strategy: str = "background", + resolver: Union[type[DefaultResolver], type[DnsResolver]] = DefaultResolver, ) -> tuple[sqlalchemy.engine.Engine, Connector]: """Creates a connection pool for a Cloud SQL instance and returns the pool and the connector. Callers are responsible for closing the pool and the @@ -64,8 +69,13 @@ def create_sqlalchemy_engine( Refresh strategy for the Cloud SQL Connector. Can be one of "lazy" or "background". For serverless environments use "lazy" to avoid errors resulting from CPU being throttled. + resolver (Optional[google.cloud.sql.connector.DefaultResolver]): + Resolver class for resolving instance connection name. Use + google.cloud.sql.connector.DnsResolver when resolving DNS domain + names or google.cloud.sql.connector.DefaultResolver for regular + instance connection names ("my-project:my-region:my-instance"). 
""" - connector = Connector(refresh_strategy=refresh_strategy) + connector = Connector(refresh_strategy=refresh_strategy, resolver=resolver) def getconn() -> pg8000.dbapi.Connection: conn: pg8000.dbapi.Connection = connector.connect( @@ -153,3 +163,21 @@ def test_customer_managed_CAS_pg8000_connection() -> None: curr_time = time[0] assert type(curr_time) is datetime connector.close() + + +def test_custom_SAN_with_dns_pg8000_connection() -> None: + """Basic test to get time from database.""" + inst_conn_name = os.environ["POSTGRES_CUSTOMER_CAS_PASS_VALID_DOMAIN_NAME"] + user = os.environ["POSTGRES_USER"] + password = os.environ["POSTGRES_CUSTOMER_CAS_PASS"] + db = os.environ["POSTGRES_DB"] + + engine, connector = create_sqlalchemy_engine( + inst_conn_name, user, password, db, resolver=DnsResolver + ) + with engine.connect() as conn: + time = conn.execute(sqlalchemy.text("SELECT NOW()")).fetchone() + conn.commit() + curr_time = time[0] + assert type(curr_time) is datetime + connector.close() From 15934bd18ab426edd19af67be799876b52895a48 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Wed, 19 Mar 2025 13:00:51 -0400 Subject: [PATCH 4/8] feat: add domain name validation (#1246) --- google/cloud/sql/connector/connection_name.py | 10 +++++ google/cloud/sql/connector/resolver.py | 13 ++++++- tests/unit/test_connection_name.py | 38 +++++++++++++++++++ 3 files changed, 59 insertions(+), 2 deletions(-) diff --git a/google/cloud/sql/connector/connection_name.py b/google/cloud/sql/connector/connection_name.py index 1bf711ab7..437fd6607 100644 --- a/google/cloud/sql/connector/connection_name.py +++ b/google/cloud/sql/connector/connection_name.py @@ -19,6 +19,10 @@ # Additionally, we have to support legacy "domain-scoped" projects # (e.g. "google.com:PROJECT") CONN_NAME_REGEX = re.compile(("([^:]+(:[^:]+)?):([^:]+):([^:]+)")) +# The domain name pattern in accordance with RFC 1035, RFC 1123 and RFC 2181. +DOMAIN_NAME_REGEX = re.compile( + r"^(?:[_a-z0-9](?:[_a-z0-9-]{0,61}[a-z0-9])?\.)+(?:[a-z](?:[a-z0-9-]{0,61}[a-z0-9])?)?$" +) @dataclass @@ -39,6 +43,12 @@ def __str__(self) -> str: return f"{self.project}:{self.region}:{self.instance_name}" +def _is_valid_domain(domain_name: str) -> bool: + if DOMAIN_NAME_REGEX.fullmatch(domain_name) is None: + return False + return True + + def _parse_connection_name(connection_name: str) -> ConnectionName: return _parse_connection_name_with_domain_name(connection_name, "") diff --git a/google/cloud/sql/connector/resolver.py b/google/cloud/sql/connector/resolver.py index 39efd0492..7d717ca05 100644 --- a/google/cloud/sql/connector/resolver.py +++ b/google/cloud/sql/connector/resolver.py @@ -17,6 +17,7 @@ from google.cloud.sql.connector.connection_name import ( _parse_connection_name_with_domain_name, ) +from google.cloud.sql.connector.connection_name import _is_valid_domain from google.cloud.sql.connector.connection_name import _parse_connection_name from google.cloud.sql.connector.connection_name import ConnectionName from google.cloud.sql.connector.exceptions import DnsResolutionError @@ -40,8 +41,16 @@ async def resolve(self, dns: str) -> ConnectionName: # type: ignore conn_name = _parse_connection_name(dns) except ValueError: # The connection name was not project:region:instance format. - # Attempt to query a TXT record to get connection name. - conn_name = await self.query_dns(dns) + # Check if connection name is a valid DNS domain name + if _is_valid_domain(dns): + # Attempt to query a TXT record to get connection name. 
+                conn_name = await self.query_dns(dns)
+            else:
+                raise ValueError(
+                    "Arg `instance_connection_string` must have "
+                    "format: PROJECT:REGION:INSTANCE or be a valid DNS domain "
+                    f"name, got {dns}."
+                )
         return conn_name
 
     async def query_dns(self, dns: str) -> ConnectionName:
diff --git a/tests/unit/test_connection_name.py b/tests/unit/test_connection_name.py
index 783e14fe3..218034d51 100644
--- a/tests/unit/test_connection_name.py
+++ b/tests/unit/test_connection_name.py
@@ -17,6 +17,7 @@ from google.cloud.sql.connector.connection_name import (
     _parse_connection_name_with_domain_name,
 )
+from google.cloud.sql.connector.connection_name import _is_valid_domain
 from google.cloud.sql.connector.connection_name import _parse_connection_name
 from google.cloud.sql.connector.connection_name import ConnectionName
 
@@ -96,3 +97,40 @@ def test_parse_connection_name_with_domain_name(
     assert expected == _parse_connection_name_with_domain_name(
         connection_name, domain_name
     )
+
+
+@pytest.mark.parametrize(
+    "domain_name, expected",
+    [
+        (
+            "prod-db.mycompany.example.com",
+            True,
+        ),
+        (
+            "example.com.",  # trailing dot
+            True,
+        ),
+        (
+            "-example.com.",  # leading hyphen
+            False,
+        ),
+        (
+            "example",  # missing TLD
+            False,
+        ),
+        (
+            "127.0.0.1",  # IPv4 address
+            False,
+        ),
+        (
+            "0:0:0:0:0:0:0:1",  # IPv6 address
+            False,
+        ),
+    ],
+)
+def test_is_valid_domain(domain_name: str, expected: bool) -> None:
+    """
+    Test that _is_valid_domain works correctly for
+    parsing domain names.
+    """
+    assert expected == _is_valid_domain(domain_name)

From 80ca37937670fb40d856ac365973d30407f88d7e Mon Sep 17 00:00:00 2001
From: "Jonathan Hess (he/him)" <103529393+hessjcg@users.noreply.github.com>
Date: Thu, 20 Mar 2025 11:58:30 -0600
Subject: [PATCH 5/8] refactor: Use new ConnectSettings.DnsNames field to
 determine the DNS Name of the instance. (#1242)

The Cloud SQL Instance ConnectSettings added a new field `dns_names` which
contains a list of valid DNS names for an instance. The Python Connector
will use these DNS names, falling back to the old `dns_name` field if
`dns_names` is not populated.

Other connectors use this DNS name for hostname validation for the
instance's TLS server certificate. However, the Python Connector does not
perform hostname validation due to limitations of Python's TLS library.

See also: GoogleCloudPlatform/cloud-sql-go-connector#954
---
 .gitignore                           |  3 +++
 google/cloud/sql/connector/client.py | 21 +++++++++++++++++----
 tests/unit/mocks.py                  | 14 +++++++++++++-
 tests/unit/test_client.py            | 22 ++++++++++++++++++++++
 4 files changed, 55 insertions(+), 5 deletions(-)

diff --git a/.gitignore b/.gitignore
index 9ef6a9067..9f449ce4a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,3 +5,6 @@ venv
 .python-version
 cloud_sql_python_connector.egg-info/
 dist/
+.idea
+.coverage
+sponge_log.xml
diff --git a/google/cloud/sql/connector/client.py b/google/cloud/sql/connector/client.py
index 8a31eb9a0..556a01bde 100644
--- a/google/cloud/sql/connector/client.py
+++ b/google/cloud/sql/connector/client.py
@@ -156,10 +156,23 @@ async def _get_metadata(
         # resolve dnsName into IP address for PSC
         # Note that we have to check for PSC enablement also because CAS
         # instances also set the dnsName field.
-        # Remove trailing period from DNS name. Required for SSL in Python
-        dns_name = ret_dict.get("dnsName", "").rstrip(".")
-        if dns_name and ret_dict.get("pscEnabled"):
-            ip_addresses["PSC"] = dns_name
+        if ret_dict.get("pscEnabled"):
+            # Find PSC instance DNS name in the dns_names field
+            psc_dns_names = [
+                d["name"]
+                for d in ret_dict.get("dnsNames", [])
+                if d["connectionType"] == "PRIVATE_SERVICE_CONNECT"
+                and d["dnsScope"] == "INSTANCE"
+            ]
+            dns_name = psc_dns_names[0] if psc_dns_names else None
+
+            # Fall back to the dns_name field if dns_names is not set
+            if dns_name is None:
+                dns_name = ret_dict.get("dnsName", None)
+
+            # Remove trailing period from DNS name. Required for SSL in Python
+            if dns_name:
+                ip_addresses["PSC"] = dns_name.rstrip(".")
 
         return {
             "ip_addresses": ip_addresses,
diff --git a/tests/unit/mocks.py b/tests/unit/mocks.py
index 5d863677b..cd3299b7f 100644
--- a/tests/unit/mocks.py
+++ b/tests/unit/mocks.py
@@ -225,6 +225,7 @@ def __init__(
             "PRIMARY": "127.0.0.1",
             "PRIVATE": "10.0.0.1",
         },
+        legacy_dns_name: bool = False,
         cert_before: datetime = datetime.datetime.now(datetime.timezone.utc),
         cert_expiration: datetime = datetime.datetime.now(datetime.timezone.utc)
         + datetime.timedelta(hours=1),
@@ -237,6 +238,7 @@ def __init__(
         self.psc_enabled = False
         self.cert_before = cert_before
         self.cert_expiration = cert_expiration
+        self.legacy_dns_name = legacy_dns_name
         # create self signed CA cert
         self.server_ca, self.server_key = generate_cert(
             self.project, self.name, cert_before, cert_expiration
@@ -255,12 +257,22 @@ async def connect_settings(self, request: Any) -> web.Response:
                 "instance": self.name,
                 "expirationTime": str(self.cert_expiration),
             },
-            "dnsName": "abcde.12345.us-central1.sql.goog",
             "pscEnabled": self.psc_enabled,
             "ipAddresses": ip_addrs,
             "region": self.region,
             "databaseVersion": self.db_version,
         }
+        if self.legacy_dns_name:
+            response["dnsName"] = "abcde.12345.us-central1.sql.goog"
+        else:
+            response["dnsNames"] = [
+                {
+                    "name": "abcde.12345.us-central1.sql.goog",
+                    "connectionType": "PRIVATE_SERVICE_CONNECT",
+                    "dnsScope": "INSTANCE",
+                }
+            ]
+
         return web.Response(content_type="application/json", body=json.dumps(response))
 
     async def generate_ephemeral(self, request: Any) -> web.Response:
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index af42af0ae..cfe509470 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -65,6 +65,28 @@ async def test_get_metadata_with_psc(fake_client: CloudSQLClient) -> None:
     assert isinstance(resp["server_ca_cert"], str)
 
 
+@pytest.mark.asyncio
+async def test_get_metadata_legacy_dns_with_psc(fake_client: CloudSQLClient) -> None:
+    """
+    Test _get_metadata returns successfully with PSC IP type using the legacy dnsName field.
+ """ + # set PSC to enabled on test instance + fake_client.instance.psc_enabled = True + fake_client.instance.legacy_dns_name = True + resp = await fake_client._get_metadata( + "test-project", + "test-region", + "test-instance", + ) + assert resp["database_version"] == "POSTGRES_15" + assert resp["ip_addresses"] == { + "PRIMARY": "127.0.0.1", + "PRIVATE": "10.0.0.1", + "PSC": "abcde.12345.us-central1.sql.goog", + } + assert isinstance(resp["server_ca_cert"], str) + + @pytest.mark.asyncio async def test_get_ephemeral(fake_client: CloudSQLClient) -> None: """ From dee267ffcfab1793187987084e2bbadcf189c7a6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 20 Mar 2025 19:29:01 +0100 Subject: [PATCH 6/8] chore(deps): update python-nonmajor (#1240) --- requirements-test.txt | 6 +++--- requirements.txt | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements-test.txt b/requirements-test.txt index 52816e95c..7d276cbf2 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -1,8 +1,8 @@ -pytest==8.3.4 -mock==5.1.0 +pytest==8.3.5 +mock==5.2.0 pytest-cov==6.0.0 pytest-asyncio==0.25.3 -SQLAlchemy[asyncio]==2.0.38 +SQLAlchemy[asyncio]==2.0.39 sqlalchemy-pytds==1.0.2 sqlalchemy-stubs==0.4 PyMySQL==1.1.1 diff --git a/requirements.txt b/requirements.txt index fd04d2873..1dc6bc047 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ aiofiles==24.1.0 -aiohttp==3.11.12 -cryptography==44.0.1 +aiohttp==3.11.14 +cryptography==44.0.2 dnspython==2.7.0 Requests==2.32.3 google-auth==2.38.0 From 1405f564019f6a30a15535ed2e0d1dc108f38195 Mon Sep 17 00:00:00 2001 From: Jack Wotherspoon Date: Fri, 21 Mar 2025 12:40:05 -0400 Subject: [PATCH 7/8] feat: reset connection when the DNS record changes (#1241) If the connector is configured with a domain name, when that domain name record changes to resolve to a new instance, the connector should detect that change, close all connections to the old instance, and create connections to the new instance. --- README.md | 38 +++ google/cloud/sql/connector/__init__.py | 2 +- google/cloud/sql/connector/connection_info.py | 22 ++ google/cloud/sql/connector/connection_name.py | 4 + google/cloud/sql/connector/connector.py | 65 +++-- google/cloud/sql/connector/exceptions.py | 7 + google/cloud/sql/connector/instance.py | 13 +- google/cloud/sql/connector/lazy.py | 15 +- google/cloud/sql/connector/monitored_cache.py | 146 +++++++++++ google/cloud/sql/connector/pg8000.py | 15 +- google/cloud/sql/connector/pymysql.py | 14 +- google/cloud/sql/connector/pytds.py | 14 +- tests/conftest.py | 2 +- tests/system/test_connector_object.py | 2 +- tests/system/test_ip_types.py | 2 +- tests/system/test_pymysql_connection.py | 2 +- tests/system/test_pytds_connection.py | 2 +- tests/unit/test_connection_name.py | 4 + tests/unit/test_connector.py | 2 +- tests/unit/test_instance.py | 2 +- tests/unit/test_lazy.py | 21 ++ tests/unit/test_monitored_cache.py | 240 ++++++++++++++++++ tests/unit/test_pg8000.py | 13 +- tests/unit/test_pymysql.py | 13 +- tests/unit/test_pytds.py | 35 ++- tests/unit/test_rate_limiter.py | 2 +- tests/unit/test_utils.py | 2 +- 27 files changed, 604 insertions(+), 95 deletions(-) create mode 100644 google/cloud/sql/connector/monitored_cache.py create mode 100644 tests/unit/test_monitored_cache.py diff --git a/README.md b/README.md index 1f0e633b9..d79e706d3 100644 --- a/README.md +++ b/README.md @@ -428,6 +428,44 @@ with Connector(resolver=DnsResolver) as connector: # ... 
use SQLAlchemy engine normally
 ```
 
+### Automatic failover using DNS domain names
+
+> [!NOTE]
+>
+> Usage of the `asyncpg` driver does not currently support automatic failover.
+
+When the connector is configured using a domain name, the connector will
+periodically check if the DNS record for an instance changes. When the connector
+detects that the domain name refers to a different instance, the connector will
+close all open connections to the old instance. Subsequent connection attempts
+will be directed to the new instance.
+
+For example: suppose an application is configured to connect using the
+domain name `prod-db.mycompany.example.com`. Initially the private DNS
+zone has a TXT record with the value `my-project:region:my-instance`. The
+application establishes connections to the `my-project:region:my-instance`
+Cloud SQL instance.
+
+Then, to reconfigure the application to use a different database
+instance, change the value of the `prod-db.mycompany.example.com` DNS record
+from `my-project:region:my-instance` to `my-project:other-region:my-instance-2`.
+
+The connector inside the application detects the change to this
+DNS record. Now, when the application connects to its database using the
+domain name `prod-db.mycompany.example.com`, it will connect to the
+`my-project:other-region:my-instance-2` Cloud SQL instance.
+
+The connector will automatically close all existing connections to
+`my-project:region:my-instance`. This will force the connection pools to
+establish new connections. Also, it may cause database queries in progress
+to fail.
+
+The connector will poll for changes to the DNS name every 30 seconds by default.
+You may configure the frequency of these checks using the Connector's
+`failover_period` argument (e.g. `Connector(failover_period=60)`). When this is
+set to 0, the connector will disable polling and only check if the DNS record
+changed when it is creating a new connection.
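+
+As a minimal sketch (the domain name, credentials, and pool setup below are
+illustrative placeholders, not values shipped with the library), enabling this
+behavior might look like:
+
+```python
+import sqlalchemy
+
+from google.cloud.sql.connector import Connector, DnsResolver
+
+# Poll the DNS record every 60 seconds instead of the default 30.
+connector = Connector(resolver=DnsResolver, failover_period=60)
+
+
+def getconn():
+    # The domain name's TXT record is assumed to hold the instance
+    # connection name, e.g. "my-project:region:my-instance".
+    return connector.connect(
+        "prod-db.mycompany.example.com",
+        "pg8000",
+        user="my-user",
+        password="my-password",
+        db="my-db",
+    )
+
+
+pool = sqlalchemy.create_engine("postgresql+pg8000://", creator=getconn)
+```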
+ ### Using the Python Connector with Python Web Frameworks The Python Connector can be used alongside popular Python web frameworks such diff --git a/google/cloud/sql/connector/__init__.py b/google/cloud/sql/connector/__init__.py index 99a5097a2..6913337d3 100644 --- a/google/cloud/sql/connector/__init__.py +++ b/google/cloud/sql/connector/__init__.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2019 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/sql/connector/connection_info.py b/google/cloud/sql/connector/connection_info.py index 82e3a9018..c9e48935f 100644 --- a/google/cloud/sql/connector/connection_info.py +++ b/google/cloud/sql/connector/connection_info.py @@ -14,6 +14,7 @@ from __future__ import annotations +import abc from dataclasses import dataclass import logging import ssl @@ -34,6 +35,27 @@ logger = logging.getLogger(name=__name__) +class ConnectionInfoCache(abc.ABC): + """Abstract class for Connector connection info caches.""" + + @abc.abstractmethod + async def connect_info(self) -> ConnectionInfo: + pass + + @abc.abstractmethod + async def force_refresh(self) -> None: + pass + + @abc.abstractmethod + async def close(self) -> None: + pass + + @property + @abc.abstractmethod + def closed(self) -> bool: + pass + + @dataclass class ConnectionInfo: """Contains all necessary information to connect securely to the diff --git a/google/cloud/sql/connector/connection_name.py b/google/cloud/sql/connector/connection_name.py index 437fd6607..ad5dc40fb 100644 --- a/google/cloud/sql/connector/connection_name.py +++ b/google/cloud/sql/connector/connection_name.py @@ -42,6 +42,10 @@ def __str__(self) -> str: return f"{self.domain_name} -> {self.project}:{self.region}:{self.instance_name}" return f"{self.project}:{self.region}:{self.instance_name}" + def get_connection_string(self) -> str: + """Get the instance connection string for the Cloud SQL instance.""" + return f"{self.project}:{self.region}:{self.instance_name}" + def _is_valid_domain(domain_name: str) -> bool: if DOMAIN_NAME_REGEX.fullmatch(domain_name) is None: diff --git a/google/cloud/sql/connector/connector.py b/google/cloud/sql/connector/connector.py index 3e53e754a..c76092a40 100755 --- a/google/cloud/sql/connector/connector.py +++ b/google/cloud/sql/connector/connector.py @@ -20,9 +20,10 @@ from functools import partial import logging import os +import socket from threading import Thread from types import TracebackType -from typing import Any, Optional, Union +from typing import Any, Callable, Optional, Union import google.auth from google.auth.credentials import Credentials @@ -35,6 +36,7 @@ from google.cloud.sql.connector.enums import RefreshStrategy from google.cloud.sql.connector.instance import RefreshAheadCache from google.cloud.sql.connector.lazy import LazyRefreshCache +from google.cloud.sql.connector.monitored_cache import MonitoredCache import google.cloud.sql.connector.pg8000 as pg8000 import google.cloud.sql.connector.pymysql as pymysql import google.cloud.sql.connector.pytds as pytds @@ -46,6 +48,7 @@ logger = logging.getLogger(name=__name__) ASYNC_DRIVERS = ["asyncpg"] +SERVER_PROXY_PORT = 3307 _DEFAULT_SCHEME = "https://" _DEFAULT_UNIVERSE_DOMAIN = "googleapis.com" _SQLADMIN_HOST_TEMPLATE = "sqladmin.{universe_domain}" @@ -67,6 +70,7 @@ def __init__( universe_domain: Optional[str] = None, refresh_strategy: str | RefreshStrategy = RefreshStrategy.BACKGROUND, resolver: type[DefaultResolver] | type[DnsResolver] = DefaultResolver, + failover_period: int = 30, ) -> None: 
"""Initializes a Connector instance. @@ -114,6 +118,11 @@ def __init__( name. To resolve a DNS record to an instance connection name, use DnsResolver. Default: DefaultResolver + + failover_period (int): The time interval in seconds between each + attempt to check if a failover has occured for a given instance. + Must be used with `resolver=DnsResolver` to have any effect. + Default: 30 """ # if refresh_strategy is str, convert to RefreshStrategy enum if isinstance(refresh_strategy, str): @@ -143,9 +152,7 @@ def __init__( ) # initialize dict to store caches, key is a tuple consisting of instance # connection name string and enable_iam_auth boolean flag - self._cache: dict[ - tuple[str, bool], Union[RefreshAheadCache, LazyRefreshCache] - ] = {} + self._cache: dict[tuple[str, bool], MonitoredCache] = {} self._client: Optional[CloudSQLClient] = None # initialize credentials @@ -167,6 +174,7 @@ def __init__( self._enable_iam_auth = enable_iam_auth self._user_agent = user_agent self._resolver = resolver() + self._failover_period = failover_period # if ip_type is str, convert to IPTypes enum if isinstance(ip_type, str): ip_type = IPTypes._from_str(ip_type) @@ -285,15 +293,19 @@ async def connect_async( driver=driver, ) enable_iam_auth = kwargs.pop("enable_iam_auth", self._enable_iam_auth) - if (instance_connection_string, enable_iam_auth) in self._cache: - cache = self._cache[(instance_connection_string, enable_iam_auth)] + + conn_name = await self._resolver.resolve(instance_connection_string) + # Cache entry must exist and not be closed + if (str(conn_name), enable_iam_auth) in self._cache and not self._cache[ + (str(conn_name), enable_iam_auth) + ].closed: + monitored_cache = self._cache[(str(conn_name), enable_iam_auth)] else: - conn_name = await self._resolver.resolve(instance_connection_string) if self._refresh_strategy == RefreshStrategy.LAZY: logger.debug( f"['{conn_name}']: Refresh strategy is set to lazy refresh" ) - cache = LazyRefreshCache( + cache: Union[LazyRefreshCache, RefreshAheadCache] = LazyRefreshCache( conn_name, self._client, self._keys, @@ -309,8 +321,14 @@ async def connect_async( self._keys, enable_iam_auth, ) + # wrap cache as a MonitoredCache + monitored_cache = MonitoredCache( + cache, + self._failover_period, + self._resolver, + ) logger.debug(f"['{conn_name}']: Connection info added to cache") - self._cache[(instance_connection_string, enable_iam_auth)] = cache + self._cache[(str(conn_name), enable_iam_auth)] = monitored_cache connect_func = { "pymysql": pymysql.connect, @@ -321,7 +339,7 @@ async def connect_async( # only accept supported database drivers try: - connector = connect_func[driver] + connector: Callable = connect_func[driver] # type: ignore except KeyError: raise KeyError(f"Driver '{driver}' is not supported.") @@ -339,14 +357,14 @@ async def connect_async( # attempt to get connection info for Cloud SQL instance try: - conn_info = await cache.connect_info() + conn_info = await monitored_cache.connect_info() # validate driver matches intended database engine DriverMapping.validate_engine(driver, conn_info.database_version) ip_address = conn_info.get_preferred_ip(ip_type) except Exception: # with an error from Cloud SQL Admin API call or IP type, invalidate # the cache and re-raise the error - await self._remove_cached(instance_connection_string, enable_iam_auth) + await self._remove_cached(str(conn_name), enable_iam_auth) raise logger.debug(f"['{conn_info.conn_name}']: Connecting to {ip_address}:3307") # format `user` param for automatic IAM database authn 
@@ -367,18 +385,28 @@ async def connect_async(
                 await conn_info.create_ssl_context(enable_iam_auth),
                 **kwargs,
             )
-            # synchronous drivers are blocking and run using executor
+            # Create socket with SSLContext for sync drivers
+            ctx = await conn_info.create_ssl_context(enable_iam_auth)
+            sock = ctx.wrap_socket(
+                socket.create_connection((ip_address, SERVER_PROXY_PORT)),
+                server_hostname=ip_address,
+            )
+            # If this connection was opened using a domain name, then store it
+            # for later in case we need to forcibly close it on failover.
+            if conn_info.conn_name.domain_name:
+                monitored_cache.sockets.append(sock)
+            # Synchronous drivers are blocking and run using executor
             connect_partial = partial(
                 connector,
                 ip_address,
-                await conn_info.create_ssl_context(enable_iam_auth),
+                sock,
                 **kwargs,
             )
             return await self._loop.run_in_executor(None, connect_partial)
 
         except Exception:
             # with any exception, we attempt a force refresh, then throw the error
-            await cache.force_refresh()
+            await monitored_cache.force_refresh()
             raise
 
     async def _remove_cached(
@@ -456,6 +484,7 @@ async def create_async_connector(
     universe_domain: Optional[str] = None,
     refresh_strategy: str | RefreshStrategy = RefreshStrategy.BACKGROUND,
     resolver: type[DefaultResolver] | type[DnsResolver] = DefaultResolver,
+    failover_period: int = 30,
 ) -> Connector:
     """Helper function to create Connector object for asyncio connections.
 
@@ -507,6 +536,11 @@ async def create_async_connector(
         DnsResolver.
         Default: DefaultResolver
 
+    failover_period (int): The time interval in seconds between each
+        attempt to check if a failover has occurred for a given instance.
+        Must be used with `resolver=DnsResolver` to have any effect.
+        Default: 30
+
     Returns:
         A Connector instance configured with running event loop.
     """
@@ -525,4 +559,5 @@ async def create_async_connector(
         universe_domain=universe_domain,
         refresh_strategy=refresh_strategy,
         resolver=resolver,
+        failover_period=failover_period,
     )
diff --git a/google/cloud/sql/connector/exceptions.py b/google/cloud/sql/connector/exceptions.py
index 92e3e5662..da39ea25d 100644
--- a/google/cloud/sql/connector/exceptions.py
+++ b/google/cloud/sql/connector/exceptions.py
@@ -77,3 +77,10 @@ class DnsResolutionError(Exception):
     Exception to be raised when an instance connection name can not be
     resolved from a DNS record.
     """
+
+
+class CacheClosedError(Exception):
+    """
+    Exception to be raised when a ConnectionInfoCache can not be accessed after
+    it is closed.
+    """
diff --git a/google/cloud/sql/connector/instance.py b/google/cloud/sql/connector/instance.py
index 5df272fe2..fb8711309 100644
--- a/google/cloud/sql/connector/instance.py
+++ b/google/cloud/sql/connector/instance.py
@@ -24,6 +24,7 @@
 
 from google.cloud.sql.connector.client import CloudSQLClient
 from google.cloud.sql.connector.connection_info import ConnectionInfo
+from google.cloud.sql.connector.connection_info import ConnectionInfoCache
 from google.cloud.sql.connector.connection_name import ConnectionName
 from google.cloud.sql.connector.exceptions import RefreshNotValidError
 from google.cloud.sql.connector.rate_limiter import AsyncRateLimiter
@@ -35,7 +36,7 @@
 APPLICATION_NAME = "cloud-sql-python-connector"
 
 
-class RefreshAheadCache:
+class RefreshAheadCache(ConnectionInfoCache):
     """Cache that refreshes connection info in the background prior to
     expiration.
Background tasks are used to schedule refresh attempts to get a new @@ -74,6 +75,15 @@ def __init__( self._refresh_in_progress = asyncio.locks.Event() self._current: asyncio.Task = self._schedule_refresh(0) self._next: asyncio.Task = self._current + self._closed = False + + @property + def conn_name(self) -> ConnectionName: + return self._conn_name + + @property + def closed(self) -> bool: + return self._closed async def force_refresh(self) -> None: """ @@ -212,3 +222,4 @@ async def close(self) -> None: # gracefully wait for tasks to cancel tasks = asyncio.gather(self._current, self._next, return_exceptions=True) await asyncio.wait_for(tasks, timeout=2.0) + self._closed = True diff --git a/google/cloud/sql/connector/lazy.py b/google/cloud/sql/connector/lazy.py index 1bc4f90f8..c75d07e52 100644 --- a/google/cloud/sql/connector/lazy.py +++ b/google/cloud/sql/connector/lazy.py @@ -21,13 +21,14 @@ from google.cloud.sql.connector.client import CloudSQLClient from google.cloud.sql.connector.connection_info import ConnectionInfo +from google.cloud.sql.connector.connection_info import ConnectionInfoCache from google.cloud.sql.connector.connection_name import ConnectionName from google.cloud.sql.connector.refresh_utils import _refresh_buffer logger = logging.getLogger(name=__name__) -class LazyRefreshCache: +class LazyRefreshCache(ConnectionInfoCache): """Cache that refreshes connection info when a caller requests a connection. Only refreshes the cache when a new connection is requested and the current @@ -62,6 +63,15 @@ def __init__( self._lock = asyncio.Lock() self._cached: Optional[ConnectionInfo] = None self._needs_refresh = False + self._closed = False + + @property + def conn_name(self) -> ConnectionName: + return self._conn_name + + @property + def closed(self) -> bool: + return self._closed async def force_refresh(self) -> None: """ @@ -121,4 +131,5 @@ async def close(self) -> None: """Close is a no-op and provided purely for a consistent interface with other cache types. """ - pass + self._closed = True + return diff --git a/google/cloud/sql/connector/monitored_cache.py b/google/cloud/sql/connector/monitored_cache.py new file mode 100644 index 000000000..0c3fc4d03 --- /dev/null +++ b/google/cloud/sql/connector/monitored_cache.py @@ -0,0 +1,146 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
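+
+# MonitoredCache wraps a ConnectionInfoCache and, when the cache was created
+# from a domain name, runs a background task that polls DNS and closes the
+# cache and its tracked sockets if the name resolves to a new instance.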
+
+import asyncio
+import logging
+import ssl
+from typing import Any, Callable, Optional, Union
+
+from google.cloud.sql.connector.connection_info import ConnectionInfo
+from google.cloud.sql.connector.connection_info import ConnectionInfoCache
+from google.cloud.sql.connector.exceptions import CacheClosedError
+from google.cloud.sql.connector.instance import RefreshAheadCache
+from google.cloud.sql.connector.lazy import LazyRefreshCache
+from google.cloud.sql.connector.resolver import DefaultResolver
+from google.cloud.sql.connector.resolver import DnsResolver
+
+logger = logging.getLogger(name=__name__)
+
+
+class MonitoredCache(ConnectionInfoCache):
+    def __init__(
+        self,
+        cache: Union[RefreshAheadCache, LazyRefreshCache],
+        failover_period: int,
+        resolver: Union[DefaultResolver, DnsResolver],
+    ) -> None:
+        self.resolver = resolver
+        self.cache = cache
+        self.domain_name_ticker: Optional[asyncio.Task] = None
+        self.sockets: list[ssl.SSLSocket] = []
+
+        # If domain name is configured for the instance and failover period is
+        # set, poll for DNS record changes.
+        if self.cache.conn_name.domain_name and failover_period > 0:
+            self.domain_name_ticker = asyncio.create_task(
+                ticker(failover_period, self._check_domain_name)
+            )
+            logger.debug(
+                f"['{self.cache.conn_name}']: Configured polling of domain "
+                f"name with failover period of {failover_period} seconds."
+            )
+
+    @property
+    def closed(self) -> bool:
+        return self.cache.closed
+
+    def _purge_closed_sockets(self) -> None:
+        """Remove closed sockets from monitored cache.
+
+        If a socket is closed by the database driver we should remove it from
+        the list of sockets.
+        """
+        open_sockets = []
+        for socket in self.sockets:
+            # Check fileno to see if the socket is closed. It returns
+            # -1 once the socket has been closed.
+            if socket.fileno() != -1:
+                open_sockets.append(socket)
+        self.sockets = open_sockets
+
+    async def _check_domain_name(self) -> None:
+        # remove any closed connections from cache
+        self._purge_closed_sockets()
+        try:
+            # Resolve domain name and see if Cloud SQL instance connection name
+            # has changed. If it has, close all connections.
+            new_conn_name = await self.resolver.resolve(
+                self.cache.conn_name.domain_name
+            )
+            if new_conn_name != self.cache.conn_name:
+                logger.debug(
+                    f"['{self.cache.conn_name}']: Cloud SQL instance changed "
+                    f"from {self.cache.conn_name.get_connection_string()} to "
+                    f"{new_conn_name.get_connection_string()}, closing all "
+                    "connections!"
+                )
+                await self.close()
+
+        except Exception as e:
+            # Domain name checks should not be fatal, log error and continue.
+            logger.debug(
+                f"['{self.cache.conn_name}']: Unable to check domain name, "
+                f"domain name {self.cache.conn_name.domain_name} did not "
+                f"resolve: {e}"
+            )
+
+    async def connect_info(self) -> ConnectionInfo:
+        if self.closed:
+            raise CacheClosedError(
+                "Can not get connection info, cache has already been closed."
+            )
+        return await self.cache.connect_info()
+
+    async def force_refresh(self) -> None:
+        # if cache is closed do not refresh
+        if self.closed:
+            return
+        return await self.cache.force_refresh()
+
+    async def close(self) -> None:
+        # Cancel domain name ticker task.
+        if self.domain_name_ticker:
+            self.domain_name_ticker.cancel()
+            try:
+                await self.domain_name_ticker
+            except asyncio.CancelledError:
+                logger.debug(
+                    f"['{self.cache.conn_name}']: Cancelled domain name polling task."
+                )
+            finally:
+                self.domain_name_ticker = None
+        # If cache is already closed, no further work.
+        if self.closed:
+            return
+
+        # Close underlying ConnectionInfoCache
+        await self.cache.close()
+
+        # Close any still open sockets
+        for socket in self.sockets:
+            # Check fileno to see if the socket is closed. It returns
+            # -1 once the socket has been closed.
+            if socket.fileno() != -1:
+                socket.close()
+
+
+async def ticker(interval: int, function: Callable, *args: Any, **kwargs: Any) -> None:
+    """
+    Ticker function that sleeps for the specified interval and then schedules
+    a call to the given function.
+    """
+    while True:
+        # Sleep for interval and then schedule task
+        await asyncio.sleep(interval)
+        asyncio.create_task(function(*args, **kwargs))
diff --git a/google/cloud/sql/connector/pg8000.py b/google/cloud/sql/connector/pg8000.py
index 1f66dde2a..baaee6615 100644
--- a/google/cloud/sql/connector/pg8000.py
+++ b/google/cloud/sql/connector/pg8000.py
@@ -14,18 +14,15 @@
 limitations under the License.
 """
 
-import socket
 import ssl
 from typing import Any, TYPE_CHECKING
 
-SERVER_PROXY_PORT = 3307
-
 if TYPE_CHECKING:
     import pg8000
 
 
 def connect(
-    ip_address: str, ctx: ssl.SSLContext, **kwargs: Any
+    ip_address: str, sock: ssl.SSLSocket, **kwargs: Any
 ) -> "pg8000.dbapi.Connection":
     """Helper function to create a pg8000 DB-API connection object.
 
     :type ip_address: str
     :param ip_address: A string containing an IP address for the Cloud SQL
         instance.
 
-    :type ctx: ssl.SSLContext
-    :param ctx: An SSLContext object created from the Cloud SQL server CA
+    :type sock: ssl.SSLSocket
+    :param sock: An SSLSocket object created from the Cloud SQL server CA
         cert and ephemeral cert.
 
@@ -48,12 +45,6 @@ def connect(
             'Unable to import module "pg8000." Please install and try again.'
         )
 
-    # Create socket and wrap with context.
-    sock = ctx.wrap_socket(
-        socket.create_connection((ip_address, SERVER_PROXY_PORT)),
-        server_hostname=ip_address,
-    )
-
     user = kwargs.pop("user")
     db = kwargs.pop("db")
     passwd = kwargs.pop("password", None)
diff --git a/google/cloud/sql/connector/pymysql.py b/google/cloud/sql/connector/pymysql.py
index a16584367..f83f7076c 100644
--- a/google/cloud/sql/connector/pymysql.py
+++ b/google/cloud/sql/connector/pymysql.py
@@ -14,18 +14,15 @@
 limitations under the License.
 """
 
-import socket
 import ssl
 from typing import Any, TYPE_CHECKING
 
-SERVER_PROXY_PORT = 3307
-
 if TYPE_CHECKING:
     import pymysql
 
 
 def connect(
-    ip_address: str, ctx: ssl.SSLContext, **kwargs: Any
+    ip_address: str, sock: ssl.SSLSocket, **kwargs: Any
 ) -> "pymysql.connections.Connection":
     """Helper function to create a pymysql DB-API connection object.
 
     :type ip_address: str
     :param ip_address: A string containing an IP address for the Cloud SQL
         instance.
 
-    :type ctx: ssl.SSLContext
-    :param ctx: An SSLContext object created from the Cloud SQL server CA
+    :type sock: ssl.SSLSocket
+    :param sock: An SSLSocket object created from the Cloud SQL server CA
         cert and ephemeral cert.
 
     :rtype: pymysql.Connection
@@ -50,11 +47,6 @@ def connect(
     # allow automatic IAM database authentication to not require password
     kwargs["password"] = kwargs["password"] if "password" in kwargs else None
 
-    # Create socket and wrap with context.
- sock = ctx.wrap_socket( - socket.create_connection((ip_address, SERVER_PROXY_PORT)), - server_hostname=ip_address, - ) # pop timeout as timeout arg is called 'connect_timeout' for pymysql timeout = kwargs.pop("timeout") kwargs["connect_timeout"] = kwargs.get("connect_timeout", timeout) diff --git a/google/cloud/sql/connector/pytds.py b/google/cloud/sql/connector/pytds.py index 243d90fd5..3128fdb6a 100644 --- a/google/cloud/sql/connector/pytds.py +++ b/google/cloud/sql/connector/pytds.py @@ -15,27 +15,24 @@ """ import platform -import socket import ssl from typing import Any, TYPE_CHECKING from google.cloud.sql.connector.exceptions import PlatformNotSupportedError -SERVER_PROXY_PORT = 3307 - if TYPE_CHECKING: import pytds -def connect(ip_address: str, ctx: ssl.SSLContext, **kwargs: Any) -> "pytds.Connection": +def connect(ip_address: str, sock: ssl.SSLSocket, **kwargs: Any) -> "pytds.Connection": """Helper function to create a pytds DB-API connection object. :type ip_address: str :param ip_address: A string containing an IP address for the Cloud SQL instance. - :type ctx: ssl.SSLContext - :param ctx: An SSLContext object created from the Cloud SQL server CA + :type sock: ssl.SSLSocket + :param sock: An SSLSocket object created from the Cloud SQL server CA cert and ephemeral cert. @@ -51,11 +48,6 @@ def connect(ip_address: str, ctx: ssl.SSLContext, **kwargs: Any) -> "pytds.Conne db = kwargs.pop("db", None) - # Create socket and wrap with context. - sock = ctx.wrap_socket( - socket.create_connection((ip_address, SERVER_PROXY_PORT)), - server_hostname=ip_address, - ) if kwargs.pop("active_directory_auth", False): if platform.system() == "Windows": # Ignore username and password if using active directory auth diff --git a/tests/conftest.py b/tests/conftest.py index 3a1a38a27..c75de48cb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/system/test_connector_object.py b/tests/system/test_connector_object.py index c2b5cf125..258b80aaf 100644 --- a/tests/system/test_connector_object.py +++ b/tests/system/test_connector_object.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/system/test_ip_types.py b/tests/system/test_ip_types.py index 2df3b1df5..3af49c54f 100644 --- a/tests/system/test_ip_types.py +++ b/tests/system/test_ip_types.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/system/test_pymysql_connection.py b/tests/system/test_pymysql_connection.py index 490b1fab4..1e7e26830 100644 --- a/tests/system/test_pymysql_connection.py +++ b/tests/system/test_pymysql_connection.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/system/test_pytds_connection.py b/tests/system/test_pytds_connection.py index d848abc18..fd88d230f 100644 --- a/tests/system/test_pytds_connection.py +++ b/tests/system/test_pytds_connection.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/test_connection_name.py b/tests/unit/test_connection_name.py index 218034d51..0861d8245 100644 --- a/tests/unit/test_connection_name.py +++ b/tests/unit/test_connection_name.py @@ -31,6 +31,8 @@ def test_ConnectionName() -> None: assert 
conn_name.domain_name == "" # test ConnectionName str() method prints instance connection name assert str(conn_name) == "project:region:instance" + # test ConnectionName.get_connection_string + assert conn_name.get_connection_string() == "project:region:instance" def test_ConnectionName_with_domain_name() -> None: @@ -42,6 +44,8 @@ def test_ConnectionName_with_domain_name() -> None: assert conn_name.domain_name == "db.example.com" # test ConnectionName str() method prints with domain name assert str(conn_name) == "db.example.com -> project:region:instance" + # test ConnectionName.get_connection_string + assert conn_name.get_connection_string() == "project:region:instance" @pytest.mark.parametrize( diff --git a/tests/unit/test_connector.py b/tests/unit/test_connector.py index e25c9a384..498c947cc 100644 --- a/tests/unit/test_connector.py +++ b/tests/unit/test_connector.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/test_instance.py b/tests/unit/test_instance.py index aeedf3399..1a3d60917 100644 --- a/tests/unit/test_instance.py +++ b/tests/unit/test_instance.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2019 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/test_lazy.py b/tests/unit/test_lazy.py index 344b073e8..c6eef7509 100644 --- a/tests/unit/test_lazy.py +++ b/tests/unit/test_lazy.py @@ -21,6 +21,27 @@ from google.cloud.sql.connector.utils import generate_keys +async def test_LazyRefreshCache_properties(fake_client: CloudSQLClient) -> None: + """ + Test that LazyRefreshCache properties work as expected. + """ + keys = asyncio.create_task(generate_keys()) + conn_name = ConnectionName("test-project", "test-region", "test-instance") + cache = LazyRefreshCache( + conn_name, + client=fake_client, + keys=keys, + enable_iam_auth=False, + ) + # test conn_name property + assert cache.conn_name == conn_name + # test closed property + assert cache.closed is False + # close cache and make sure property is updated + await cache.close() + assert cache.closed is True + + async def test_LazyRefreshCache_connect_info(fake_client: CloudSQLClient) -> None: """ Test that LazyRefreshCache.connect_info works as expected. diff --git a/tests/unit/test_monitored_cache.py b/tests/unit/test_monitored_cache.py new file mode 100644 index 000000000..1eea4eb46 --- /dev/null +++ b/tests/unit/test_monitored_cache.py @@ -0,0 +1,240 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import asyncio
+import socket
+
+import dns.message
+import dns.rdataclass
+import dns.rdatatype
+import dns.resolver
+from mock import patch
+from mocks import create_ssl_context
+import pytest
+
+from google.cloud.sql.connector.client import CloudSQLClient
+from google.cloud.sql.connector.connection_name import ConnectionName
+from google.cloud.sql.connector.exceptions import CacheClosedError
+from google.cloud.sql.connector.lazy import LazyRefreshCache
+from google.cloud.sql.connector.monitored_cache import MonitoredCache
+from google.cloud.sql.connector.resolver import DefaultResolver
+from google.cloud.sql.connector.resolver import DnsResolver
+from google.cloud.sql.connector.utils import generate_keys
+
+query_text = """id 1234
+opcode QUERY
+rcode NOERROR
+flags QR AA RD RA
+;QUESTION
+db.example.com. IN TXT
+;ANSWER
+db.example.com. 0 IN TXT "test-project:test-region:test-instance"
+;AUTHORITY
+;ADDITIONAL
+"""
+
+
+async def test_MonitoredCache_properties(fake_client: CloudSQLClient) -> None:
+    """
+    Test that MonitoredCache properties work as expected.
+    """
+    conn_name = ConnectionName("test-project", "test-region", "test-instance")
+    cache = LazyRefreshCache(
+        conn_name,
+        client=fake_client,
+        keys=asyncio.create_task(generate_keys()),
+        enable_iam_auth=False,
+    )
+    monitored_cache = MonitoredCache(cache, 30, DefaultResolver())
+    # test that ticker is not set for instance not using domain name
+    assert monitored_cache.domain_name_ticker is None
+    # test closed property
+    assert monitored_cache.closed is False
+    # close cache and make sure property is updated
+    await monitored_cache.close()
+    assert monitored_cache.closed is True
+
+
+async def test_MonitoredCache_CacheClosedError(fake_client: CloudSQLClient) -> None:
+    """
+    Test that MonitoredCache.connect_info errors when cache is closed.
+    """
+    conn_name = ConnectionName("test-project", "test-region", "test-instance")
+    cache = LazyRefreshCache(
+        conn_name,
+        client=fake_client,
+        keys=asyncio.create_task(generate_keys()),
+        enable_iam_auth=False,
+    )
+    monitored_cache = MonitoredCache(cache, 30, DefaultResolver())
+    # test closed property
+    assert monitored_cache.closed is False
+    # close cache and make sure property is updated
+    await monitored_cache.close()
+    assert monitored_cache.closed is True
+    # attempt to get connect info from closed cache
+    with pytest.raises(CacheClosedError):
+        await monitored_cache.connect_info()
+
+
+async def test_MonitoredCache_with_DnsResolver(fake_client: CloudSQLClient) -> None:
+    """
+    Test that MonitoredCache with DnsResolver works as expected.
+    """
+    conn_name = ConnectionName(
+        "test-project", "test-region", "test-instance", "db.example.com"
+    )
+    cache = LazyRefreshCache(
+        conn_name,
+        client=fake_client,
+        keys=asyncio.create_task(generate_keys()),
+        enable_iam_auth=False,
+    )
+    # Patch DNS resolution with valid TXT records
+    with patch("dns.asyncresolver.Resolver.resolve") as mock_connect:
+        answer = dns.resolver.Answer(
+            "db.example.com",
+            dns.rdatatype.TXT,
+            dns.rdataclass.IN,
+            dns.message.from_text(query_text),
+        )
+        mock_connect.return_value = answer
+        resolver = DnsResolver()
+        resolver.port = 5053
+        monitored_cache = MonitoredCache(cache, 30, resolver)
+        # test that ticker is set for instance using domain name
+        assert type(monitored_cache.domain_name_ticker) is asyncio.Task
+        # test closed property
+        assert monitored_cache.closed is False
+        # close cache and make sure property is updated
+        await monitored_cache.close()
+        assert monitored_cache.closed is True
+        # domain name ticker should be set back to None
+        assert monitored_cache.domain_name_ticker is None
+
+
+async def test_MonitoredCache_with_disabled_failover(
+    fake_client: CloudSQLClient,
+) -> None:
+    """
+    Test that MonitoredCache disables DNS polling with failover_period=0.
+    """
+    conn_name = ConnectionName(
+        "test-project", "test-region", "test-instance", "db.example.com"
+    )
+    cache = LazyRefreshCache(
+        conn_name,
+        client=fake_client,
+        keys=asyncio.create_task(generate_keys()),
+        enable_iam_auth=False,
+    )
+    monitored_cache = MonitoredCache(cache, 0, DnsResolver())
+    # test that ticker is not set when failover is disabled
+    assert monitored_cache.domain_name_ticker is None
+    # test closed property
+    assert monitored_cache.closed is False
+    # close cache and make sure property is updated
+    await monitored_cache.close()
+    assert monitored_cache.closed is True
+
+
+@pytest.mark.usefixtures("server")
+async def test_MonitoredCache_check_domain_name(fake_client: CloudSQLClient) -> None:
+    """
+    Test that MonitoredCache is closed when _check_domain_name detects a domain change.
+ """ + conn_name = ConnectionName( + "my-project", "my-region", "my-instance", "db.example.com" + ) + cache = LazyRefreshCache( + conn_name, + client=fake_client, + keys=asyncio.create_task(generate_keys()), + enable_iam_auth=False, + ) + # Patch DNS resolution with valid TXT records + with patch("dns.asyncresolver.Resolver.resolve") as mock_connect: + answer = dns.resolver.Answer( + "db.example.com", + dns.rdatatype.TXT, + dns.rdataclass.IN, + dns.message.from_text(query_text), + ) + mock_connect.return_value = answer + resolver = DnsResolver() + resolver.port = 5053 + + # configure a local socket + ip_addr = "127.0.0.1" + context = await create_ssl_context() + sock = context.wrap_socket( + socket.create_connection((ip_addr, 3307)), + server_hostname=ip_addr, + do_handshake_on_connect=False, + ) + # verify socket is open + assert sock.fileno() != -1 + # set failover to 0 to disable polling + monitored_cache = MonitoredCache(cache, 0, resolver) + # add socket to cache + monitored_cache.sockets = [sock] + # check cache is not closed + assert monitored_cache.closed is False + # call _check_domain_name and verify cache is closed + await monitored_cache._check_domain_name() + assert monitored_cache.closed is True + # verify socket was closed + assert sock.fileno() == -1 + + +@pytest.mark.usefixtures("server") +async def test_MonitoredCache_purge_closed_sockets(fake_client: CloudSQLClient) -> None: + """ + Test that MonitoredCache._purge_closed_sockets removes closed sockets from + cache. + """ + conn_name = ConnectionName( + "my-project", "my-region", "my-instance", "db.example.com" + ) + cache = LazyRefreshCache( + conn_name, + client=fake_client, + keys=asyncio.create_task(generate_keys()), + enable_iam_auth=False, + ) + # configure a local socket + ip_addr = "127.0.0.1" + context = await create_ssl_context() + sock = context.wrap_socket( + socket.create_connection((ip_addr, 3307)), + server_hostname=ip_addr, + do_handshake_on_connect=False, + ) + + # set failover to 0 to disable polling + monitored_cache = MonitoredCache(cache, 0, DnsResolver()) + # verify socket is open + assert sock.fileno() != -1 + # add socket to cache + monitored_cache.sockets = [sock] + # call _purge_closed_sockets and verify socket remains + monitored_cache._purge_closed_sockets() + # verify socket is still open + assert sock.fileno() != -1 + assert len(monitored_cache.sockets) == 1 + # close socket + sock.close() + # call _purge_closed_sockets and verify socket is removed + monitored_cache._purge_closed_sockets() + assert len(monitored_cache.sockets) == 0 diff --git a/tests/unit/test_pg8000.py b/tests/unit/test_pg8000.py index 1b2adbb65..e01a53445 100644 --- a/tests/unit/test_pg8000.py +++ b/tests/unit/test_pg8000.py @@ -14,7 +14,7 @@ limitations under the License. 
""" -from functools import partial +import socket from typing import Any from mock import patch @@ -31,15 +31,14 @@ async def test_pg8000(kwargs: Any) -> None: ip_addr = "127.0.0.1" # build ssl.SSLContext context = await create_ssl_context() - # force all wrap_socket calls to have do_handshake_on_connect=False - setattr( - context, - "wrap_socket", - partial(context.wrap_socket, do_handshake_on_connect=False), + sock = context.wrap_socket( + socket.create_connection((ip_addr, 3307)), + server_hostname=ip_addr, + do_handshake_on_connect=False, ) with patch("pg8000.dbapi.connect") as mock_connect: mock_connect.return_value = True - connection = connect(ip_addr, context, **kwargs) + connection = connect(ip_addr, sock, **kwargs) assert connection is True # verify that driver connection call would be made assert mock_connect.assert_called_once diff --git a/tests/unit/test_pymysql.py b/tests/unit/test_pymysql.py index 69d2aba8f..66b1f22a3 100644 --- a/tests/unit/test_pymysql.py +++ b/tests/unit/test_pymysql.py @@ -14,7 +14,7 @@ limitations under the License. """ -from functools import partial +import socket import ssl from typing import Any @@ -40,15 +40,14 @@ async def test_pymysql(kwargs: Any) -> None: ip_addr = "127.0.0.1" # build ssl.SSLContext context = await create_ssl_context() - # force all wrap_socket calls to have do_handshake_on_connect=False - setattr( - context, - "wrap_socket", - partial(context.wrap_socket, do_handshake_on_connect=False), + sock = context.wrap_socket( + socket.create_connection((ip_addr, 3307)), + server_hostname=ip_addr, + do_handshake_on_connect=False, ) kwargs["timeout"] = 30 with patch("pymysql.Connection") as mock_connect: mock_connect.return_value = MockConnection - pymysql_connect(ip_addr, context, **kwargs) + pymysql_connect(ip_addr, sock, **kwargs) # verify that driver connection call would be made assert mock_connect.assert_called_once diff --git a/tests/unit/test_pytds.py b/tests/unit/test_pytds.py index 633aab74a..9efe00ee5 100644 --- a/tests/unit/test_pytds.py +++ b/tests/unit/test_pytds.py @@ -14,8 +14,8 @@ limitations under the License. 
""" -from functools import partial import platform +import socket from typing import Any from mock import patch @@ -43,16 +43,15 @@ async def test_pytds(kwargs: Any) -> None: ip_addr = "127.0.0.1" # build ssl.SSLContext context = await create_ssl_context() - # force all wrap_socket calls to have do_handshake_on_connect=False - setattr( - context, - "wrap_socket", - partial(context.wrap_socket, do_handshake_on_connect=False), + sock = context.wrap_socket( + socket.create_connection((ip_addr, 3307)), + server_hostname=ip_addr, + do_handshake_on_connect=False, ) with patch("pytds.connect") as mock_connect: mock_connect.return_value = True - connection = connect(ip_addr, context, **kwargs) + connection = connect(ip_addr, sock, **kwargs) # verify that driver connection call would be made assert connection is True assert mock_connect.assert_called_once @@ -68,17 +67,16 @@ async def test_pytds_platform_error(kwargs: Any) -> None: assert platform.system() == "Linux" # build ssl.SSLContext context = await create_ssl_context() - # force all wrap_socket calls to have do_handshake_on_connect=False - setattr( - context, - "wrap_socket", - partial(context.wrap_socket, do_handshake_on_connect=False), + sock = context.wrap_socket( + socket.create_connection((ip_addr, 3307)), + server_hostname=ip_addr, + do_handshake_on_connect=False, ) # add active_directory_auth to kwargs kwargs["active_directory_auth"] = True # verify that error is thrown with Linux and active_directory_auth with pytest.raises(PlatformNotSupportedError): - connect(ip_addr, context, **kwargs) + connect(ip_addr, sock, **kwargs) @pytest.mark.usefixtures("server") @@ -94,11 +92,10 @@ async def test_pytds_windows_active_directory_auth(kwargs: Any) -> None: assert platform.system() == "Windows" # build ssl.SSLContext context = await create_ssl_context() - # force all wrap_socket calls to have do_handshake_on_connect=False - setattr( - context, - "wrap_socket", - partial(context.wrap_socket, do_handshake_on_connect=False), + sock = context.wrap_socket( + socket.create_connection((ip_addr, 3307)), + server_hostname=ip_addr, + do_handshake_on_connect=False, ) # add active_directory_auth and server_name to kwargs kwargs["active_directory_auth"] = True @@ -107,7 +104,7 @@ async def test_pytds_windows_active_directory_auth(kwargs: Any) -> None: mock_connect.return_value = True with patch("pytds.login.SspiAuth") as mock_login: mock_login.return_value = True - connection = connect(ip_addr, context, **kwargs) + connection = connect(ip_addr, sock, **kwargs) # verify that driver connection call would be made assert mock_login.assert_called_once assert connection is True diff --git a/tests/unit/test_rate_limiter.py b/tests/unit/test_rate_limiter.py index 5e187b81d..8ef586b58 100644 --- a/tests/unit/test_rate_limiter.py +++ b/tests/unit/test_rate_limiter.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 6545bc7a8..fe4e90955 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -1,4 +1,4 @@ -"""" +""" Copyright 2019 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); From be8fd060f4df9773b7bb89f13a092b60277c7ea3 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 21 Mar 2025 12:44:23 -0400 Subject: [PATCH 8/8] chore(main): release 1.18.0 (#1247) Co-authored-by: release-please[bot] 
 <55107282+release-please[bot]@users.noreply.github.com>
---
 CHANGELOG.md                          | 8 ++++++++
 google/cloud/sql/connector/version.py | 2 +-
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c812c21bf..47c853bca 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,13 @@
 # Changelog
 
+## [1.18.0](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/compare/v1.17.0...v1.18.0) (2025-03-21)
+
+
+### Features
+
+* add domain name validation ([#1246](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/issues/1246)) ([15934bd](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/commit/15934bd18ab426edd19af67be799876b52895a48))
+* reset connection when the DNS record changes ([#1241](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/issues/1241)) ([1405f56](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/commit/1405f564019f6a30a15535ed2e0d1dc108f38195))
+
 ## [1.17.0](https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/compare/v1.16.0...v1.17.0) (2025-02-12)
 
diff --git a/google/cloud/sql/connector/version.py b/google/cloud/sql/connector/version.py
index 18c9772c7..f89ebde3c 100644
--- a/google/cloud/sql/connector/version.py
+++ b/google/cloud/sql/connector/version.py
@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "1.17.0"
+__version__ = "1.18.0"
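
A note on the MonitoredCache tests above: the behavior under test is a cache
wrapper that re-resolves the instance's domain name every failover_period
seconds and tears down its cached sockets when the DNS answer changes, with
failover_period=0 disabling polling entirely. The sketch below is a minimal
illustration of that ticker pattern only; resolve_domain is a hypothetical
stand-in for the connector's DnsResolver, and DomainWatcher is not the real
MonitoredCache implementation.

import asyncio
import socket
from typing import Awaitable, Callable, Optional


class DomainWatcher:
    """Minimal sketch of a DNS-change watcher (illustrative, assumed behavior)."""

    def __init__(
        self,
        domain_name: str,
        resolve_domain: Callable[[str], Awaitable[str]],  # hypothetical resolver
        failover_period: int,
    ) -> None:
        # Must be constructed inside a running event loop, as in the tests.
        self.domain_name = domain_name
        self.resolve_domain = resolve_domain
        self.sockets: list[socket.socket] = []
        self.closed = False
        # failover_period=0 disables polling, mirroring the disabled-failover test.
        self.domain_name_ticker: Optional[asyncio.Task] = (
            asyncio.create_task(self._poll(failover_period))
            if failover_period > 0
            else None
        )

    async def _poll(self, period: int) -> None:
        expected = await self.resolve_domain(self.domain_name)
        while not self.closed:
            await asyncio.sleep(period)
            if await self.resolve_domain(self.domain_name) != expected:
                # Domain now points elsewhere: invalidate all open connections.
                await self.close()

    async def close(self) -> None:
        self.closed = True
        for sock in self.sockets:
            sock.close()  # fileno() becomes -1, which the tests assert
        if self.domain_name_ticker is not None:
            self.domain_name_ticker.cancel()
        self.domain_name_ticker = None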
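
The driver test updates to test_pg8000.py, test_pymysql.py, and test_pytds.py
all make the same change: instead of monkey-patching SSLContext.wrap_socket
with functools.partial, each test builds the TLS socket explicitly and passes
it to the driver's connect helper. A standalone sketch of that wrapping step,
assuming the tests' local proxy endpoint of 127.0.0.1:3307 (the helper name
wrap_proxy_socket is illustrative, not part of the connector):

import socket
import ssl


def wrap_proxy_socket(
    context: ssl.SSLContext, ip_addr: str, port: int = 3307
) -> ssl.SSLSocket:
    """Connect over TCP, then layer TLS on top without handshaking yet."""
    return context.wrap_socket(
        socket.create_connection((ip_addr, port)),
        server_hostname=ip_addr,
        # Defer the handshake; this is what the removed
        # partial(context.wrap_socket, do_handshake_on_connect=False)
        # shim previously forced on every wrap_socket call.
        do_handshake_on_connect=False,
    )

Handing the ready socket to connect() also makes socket lifetime explicit,
which the MonitoredCache tests above rely on when asserting that
sock.fileno() == -1 after a close.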