diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 757c9dca..1ce60852 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 -# created: 2022-05-05T22:08:23.383410683Z + digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c +# created: 2022-07-05T18:31:20.838186805Z diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml index 1813b007..f3d10f92 100644 --- a/.github/workflows/unittest.yml +++ b/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 00000000..3595fb43 --- /dev/null +++ b/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 00000000..3595fb43 --- /dev/null +++ b/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg deleted file mode 100644 index 54527096..00000000 --- a/.kokoro/samples/python3.6/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.6" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-storage/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-bigquery-storage/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg deleted file mode 100644 index 7218af14..00000000 --- a/.kokoro/samples/python3.6/continuous.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg deleted file mode 100644 index a6b603ce..00000000 --- a/.kokoro/samples/python3.6/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-bigquery-storage/.kokoro/test-samples-against-head.sh" -} diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg deleted file mode 100644 index 71cd1e59..00000000 --- a/.kokoro/samples/python3.6/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg deleted file mode 100644 index a1c8d975..00000000 --- a/.kokoro/samples/python3.6/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 8a324c9c..2c6500ca 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets accessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,7 +76,7 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot.
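The two new prerelease-deps.cfg files above pin a build to a single nox session through the NOX_SESSION environment variable, and test-samples-impl.sh now drives nox with Python 3.9 instead of 3.6. A minimal sketch of that wiring, assuming a runner that consumes NOX_SESSION the same way; the dispatch code below is illustrative, not the actual Kokoro implementation:

```python
# Illustrative sketch: how a CI runner might map the NOX_SESSION value
# from prerelease-deps.cfg onto a nox invocation. Not the real Kokoro code.
import os
import subprocess

cmd = ["python3.9", "-m", "nox"]
session = os.environ.get("NOX_SESSION")
if session:
    # e.g. NOX_SESSION=prerelease_deps runs only that session.
    cmd += ["-s", session]
subprocess.run(cmd, check=True)
```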
diff --git a/CHANGELOG.md b/CHANGELOG.md index e43d9f5b..5b336aa7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-bigquery-storage/#history +## [2.14.0](https://github.com/googleapis/python-bigquery-storage/compare/v2.13.2...v2.14.0) (2022-07-08) + + +### Features + +* add audience parameter ([346c719](https://github.com/googleapis/python-bigquery-storage/commit/346c7199e2bd85c77403f1288988c2d64b435ee8)) +* add fields to eventually contain row level errors ([346c719](https://github.com/googleapis/python-bigquery-storage/commit/346c7199e2bd85c77403f1288988c2d64b435ee8)) + + +### Bug Fixes + +* **deps:** require google-api-core >= 2.8.0 ([346c719](https://github.com/googleapis/python-bigquery-storage/commit/346c7199e2bd85c77403f1288988c2d64b435ee8)) +* handle AttributeError in bigquery_storage writer ([#414](https://github.com/googleapis/python-bigquery-storage/issues/414)) ([2cb641a](https://github.com/googleapis/python-bigquery-storage/commit/2cb641a7e0e8bfde23693b4f59f6b914520d7364)) +* Modify client lib retry policy for CreateWriteStream with longer backoff, more error code and longer overall time ([346c719](https://github.com/googleapis/python-bigquery-storage/commit/346c7199e2bd85c77403f1288988c2d64b435ee8)) +* require python 3.7+ ([#468](https://github.com/googleapis/python-bigquery-storage/issues/468)) ([c13b1e5](https://github.com/googleapis/python-bigquery-storage/commit/c13b1e5e59e8ce2794b339809ce9f6a0ba66439c)) + ## [2.13.2](https://github.com/googleapis/python-bigquery-storage/compare/v2.13.1...v2.13.2) (2022-06-06) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 736038c4..4886343a 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -221,13 +221,11 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ @@ -239,7 +237,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-bigquery-storage/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. +We also explicitly decided to support Python 3 beginning with version 3.7. Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/README.rst b/README.rst index ff61077f..50184843 100644 --- a/README.rst +++ b/README.rst @@ -1,22 +1,22 @@ -Python Client for BigQuery Storage API -====================================== +Python Client for Google BigQuery Storage API +============================================= -|ga| |pypi| |versions| +|stable| |pypi| |versions| -`BigQuery Storage API`_: +`Google BigQuery Storage API`_: - `Client Library Documentation`_ - `Product Documentation`_ -.. |ga| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. 
|stable| image:: https://img.shields.io/badge/support-stable-gold.svg :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#stability-levels .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-storage.svg :target: https://pypi.org/project/google-cloud-bigquery-storage/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-storage.svg :target: https://pypi.org/project/google-cloud-bigquery-storage/ -.. _BigQuery Storage API: https://cloud.google.com/bigquery/docs/reference/storage/ +.. _Google BigQuery Storage API: https://cloud.google.com/bigquery/docs/reference/storage/ .. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigquerystorage/latest -.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/storage/ +.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/storage/ Quick Start ----------- @@ -25,12 +25,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ -3. `Enable the BigQuery Storage API.`_ +3. `Enable the Google BigQuery Storage API.`_ 4. `Setup Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Enable the BigQuery Storage API.: https://console.cloud.google.com/apis/library/bigquerystorage.googleapis.com +.. _Enable the Google BigQuery Storage API.: https://cloud.google.com/bigquery/docs/reference/storage/ .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ @@ -47,16 +47,25 @@ dependencies. .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +Code samples and snippets +~~~~~~~~~~~~~~~~~~~~~~~~~ + +Code samples and snippets live in the `samples/` folder. + + Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6 +Our client libraries are compatible with all current `active <https://devguide.python.org/devcycle/#in-development-main-branch>`__ and `maintenance <https://devguide.python.org/devcycle/#maintenance-branches>`__ versions of +Python. + +Python >= 3.7 Unsupported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7, Python == 3.5. +Python <= 3.6 -The last version of this library compatible with Python 2.7 and 3.5 is -``google-cloud-bigquery-storage==1.1.0``. +If you are using an `end-of-life <https://devguide.python.org/devcycle/#end-of-life-branches>`__ +version of Python, we recommend that you update as soon as possible to an actively supported version. Mac/Linux ^^^^^^^^^ pip install virtualenv virtualenv source /bin/activate /bin/pip install google-cloud-bigquery-storage Windows ^^^^^^^ pip install virtualenv virtualenv \Scripts\activate \Scripts\pip.exe install google-cloud-bigquery-storage -Optional Dependencies -^^^^^^^^^^^^^^^^^^^^^ - -Several features of ``google-cloud-bigquery-storage`` require additional -dependencies. - -* Parse Arrow blocks in a ``read_rows()`` stream using `pyarrow - `_. - - ``pip install 'google-cloud-bigquery-storage[pyarrow]'`` - - -* Parse Avro blocks in a ``read_rows()`` stream using `fastavro - `_. - - ``pip install google-cloud-bigquery-storage[fastavro]`` - -* Download rows to a `pandas `_ - dataframe. - - ``pip install 'google-cloud-bigquery-storage[pandas,pyarrow]'`` - Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for BigQuery Storage API - API to see other available methods on the client.
-- Read the `BigQuery Storage API Product documentation`_ to learn +- Read the `Client Library Documentation`_ for Google BigQuery Storage API + to see other available methods on the client. +- Read the `Google BigQuery Storage API Product documentation`_ to learn more about the product and see How-to Guides. -- View this `repository’s main README`_ to see the full list of Cloud +- View this `README`_ to see the full list of Cloud APIs that we cover. -.. _BigQuery Storage API Product documentation: https://cloud.google.com/bigquery/docs/reference/storage/ -.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst +.. _Google BigQuery Storage API Product documentation: https://cloud.google.com/bigquery/docs/reference/storage/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/google/cloud/bigquery_storage/__init__.py b/google/cloud/bigquery_storage/__init__.py index 75ad2cdd..f62428f2 100644 --- a/google/cloud/bigquery_storage/__init__.py +++ b/google/cloud/bigquery_storage/__init__.py @@ -48,6 +48,7 @@ from google.cloud.bigquery_storage_v1.types.storage import GetWriteStreamRequest from google.cloud.bigquery_storage_v1.types.storage import ReadRowsRequest from google.cloud.bigquery_storage_v1.types.storage import ReadRowsResponse +from google.cloud.bigquery_storage_v1.types.storage import RowError from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamRequest from google.cloud.bigquery_storage_v1.types.storage import SplitReadStreamResponse from google.cloud.bigquery_storage_v1.types.storage import StorageError @@ -86,6 +87,7 @@ "GetWriteStreamRequest", "ReadRowsRequest", "ReadRowsResponse", + "RowError", "SplitReadStreamRequest", "SplitReadStreamResponse", "StorageError", diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py index 76f97292..786eb555 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/async_client.py @@ -288,20 +288,19 @@ async def sample_create_read_session(): on the ``request`` instance; if ``request`` is provided, this should not be set. max_stream_count (:class:`int`): - Max initial number of streams. If - unset or zero, the server will provide a - value of streams so as to produce - reasonable throughput. Must be - non-negative. The number of streams may - be lower than the requested number, - depending on the amount parallelism that - is reasonable for the table. Error will - be returned if the max count is greater - than the current system max limit of + Max initial number of streams. If unset or zero, the + server will provide a value of streams so as to produce + reasonable throughput. Must be non-negative. The number + of streams may be lower than the requested number, + depending on the amount of parallelism that is reasonable + for the table. There is a default system max limit of 1,000. - Streams must be read starting from - offset 0. + This must be greater than or equal to + preferred_min_stream_count. Typically, clients should + either leave this unset to let the system determine + an upper bound OR set this to a size for the maximum "units + of work" it can gracefully handle.
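As a hedged illustration of the reworded max_stream_count contract above, a caller might cap its own fan-out as follows; the project and table paths are placeholders, and the server can still return fewer streams:

```python
# Hedged usage sketch for the max_stream_count semantics described above;
# the project and table paths are placeholders.
from google.cloud.bigquery_storage_v1 import BigQueryReadClient, types

client = BigQueryReadClient()
session = client.create_read_session(
    parent="projects/my-project",
    read_session=types.ReadSession(
        table="projects/my-project/datasets/my_dataset/tables/my_table",
        data_format=types.DataFormat.ARROW,
    ),
    # An upper bound on the "units of work" this caller can handle;
    # zero/unset lets the server choose, and fewer streams may be returned.
    max_stream_count=8,
)
print(f"session has {len(session.streams)} streams")
```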
This corresponds to the ``max_stream_count`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py index 06d92313..da2c6088 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/client.py @@ -473,6 +473,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_read_session( @@ -546,20 +547,19 @@ def sample_create_read_session(): on the ``request`` instance; if ``request`` is provided, this should not be set. max_stream_count (int): - Max initial number of streams. If - unset or zero, the server will provide a - value of streams so as to produce - reasonable throughput. Must be - non-negative. The number of streams may - be lower than the requested number, - depending on the amount parallelism that - is reasonable for the table. Error will - be returned if the max count is greater - than the current system max limit of + Max initial number of streams. If unset or zero, the + server will provide a value of streams so as to produce + reasonable throughput. Must be non-negative. The number + of streams may be lower than the requested number, + depending on the amount of parallelism that is reasonable + for the table. There is a default system max limit of 1,000. - Streams must be read starting from - offset 0. + This must be greater than or equal to + preferred_min_stream_count. Typically, clients should + either leave this unset to let the system determine + an upper bound OR set this to a size for the maximum "units + of work" it can gracefully handle. This corresponds to the ``max_stream_count`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py index 21ef0858..fa95cabe 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/base.py @@ -58,6 +58,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -85,11 +86,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -110,6 +106,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -122,6 +123,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py index b00ad968..3717ba1e 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -155,6 +156,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py index a39117f1..b47e745d 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_read/transports/grpc_asyncio.py @@ -105,6 +105,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -200,6 +201,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py b/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py index fe87ed47..197ac7fe 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/async_client.py @@ -313,16 +313,17 @@ async def sample_create_write_stream(): rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_write_stream, default_retry=retries.Retry( - initial=0.1, - maximum=60.0, + initial=10.0, + maximum=120.0, multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, + core_exceptions.ResourceExhausted, core_exceptions.ServiceUnavailable, ), - deadline=600.0, + deadline=1200.0, ), - default_timeout=600.0, + default_timeout=1200.0, client_info=DEFAULT_CLIENT_INFO, ) diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/client.py b/google/cloud/bigquery_storage_v1/services/big_query_write/client.py index 7c6cbfaa..4edb3cb2 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/client.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/client.py @@ -463,6 +463,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_write_stream( diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py index 6cb72fc5..6eeb80d5 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py +++ 
b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/base.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -86,11 +87,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -111,6 +107,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -123,22 +124,28 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.create_write_stream: gapic_v1.method.wrap_method( self.create_write_stream, default_retry=retries.Retry( - initial=0.1, - maximum=60.0, + initial=10.0, + maximum=120.0, multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.DeadlineExceeded, + core_exceptions.ResourceExhausted, core_exceptions.ServiceUnavailable, ), - deadline=600.0, + deadline=1200.0, ), - default_timeout=600.0, + default_timeout=1200.0, client_info=client_info, ), self.append_rows: gapic_v1.method.wrap_method( diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py index 180d70ee..43337342 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py index 4594c158..c3a47d81 100644 --- a/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1/services/big_query_write/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
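The api_audience parameter threaded through the clients and transports in this patch originates from google-api-core's ClientOptions, which is why the changelog raises the google-api-core floor to 2.8.0. A hedged sketch of setting it from application code, with a placeholder audience URL:

```python
# Hedged sketch: supplying the new api_audience client option. The audience
# value here is a placeholder, not a recommended endpoint.
from google.api_core.client_options import ClientOptions
from google.cloud import bigquery_storage

options = ClientOptions(api_audience="https://bigquerystorage.example.com")
client = bigquery_storage.BigQueryWriteClient(client_options=options)
# Per the transport changes above, the audience is forwarded to
# credentials.with_gdch_audience() when the credentials support it,
# falling back to the host when api_audience is unset.
```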
@@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1/types/__init__.py b/google/cloud/bigquery_storage_v1/types/__init__.py index ce31fa21..5b5d5613 100644 --- a/google/cloud/bigquery_storage_v1/types/__init__.py +++ b/google/cloud/bigquery_storage_v1/types/__init__.py @@ -40,6 +40,7 @@ GetWriteStreamRequest, ReadRowsRequest, ReadRowsResponse, + RowError, SplitReadStreamRequest, SplitReadStreamResponse, StorageError, @@ -79,6 +80,7 @@ "GetWriteStreamRequest", "ReadRowsRequest", "ReadRowsResponse", + "RowError", "SplitReadStreamRequest", "SplitReadStreamResponse", "StorageError", diff --git a/google/cloud/bigquery_storage_v1/types/storage.py b/google/cloud/bigquery_storage_v1/types/storage.py index 024c8747..31a0eb9e 100644 --- a/google/cloud/bigquery_storage_v1/types/storage.py +++ b/google/cloud/bigquery_storage_v1/types/storage.py @@ -46,6 +46,7 @@ "FlushRowsRequest", "FlushRowsResponse", "StorageError", + "RowError", }, ) @@ -60,17 +61,18 @@ class CreateReadSessionRequest(proto.Message): read_session (google.cloud.bigquery_storage_v1.types.ReadSession): Required. Session to be created. max_stream_count (int): - Max initial number of streams. If unset or - zero, the server will provide a value of streams - so as to produce reasonable throughput. Must be - non-negative. The number of streams may be lower - than the requested number, depending on the - amount parallelism that is reasonable for the - table. Error will be returned if the max count - is greater than the current system max limit of - 1,000. - - Streams must be read starting from offset 0. + Max initial number of streams. If unset or zero, the server + will provide a value of streams so as to produce reasonable + throughput. Must be non-negative. The number of streams may + be lower than the requested number, depending on the amount + of parallelism that is reasonable for the table. There is a + default system max limit of 1,000. + + This must be greater than or equal to + preferred_min_stream_count. Typically, clients should either + leave this unset to let the system determine an upper + bound OR set this to a size for the maximum "units of work" it + can gracefully handle. """ parent = proto.Field( @@ -471,6 +473,11 @@ class AppendRowsResponse(proto.Message): to user so that user can use it to input new type of message. It will be empty when no schema updates have occurred. + row_errors (Sequence[google.cloud.bigquery_storage_v1.types.RowError]): + If a request failed due to corrupted rows, no + rows in the batch will be appended. The API will + return row level error info, so that the caller + can remove the bad rows and retry the request. """ class AppendResult(proto.Message): @@ -506,6 +513,11 @@ class AppendResult(proto.Message): number=3, message=table.TableSchema, ) + row_errors = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="RowError", + ) class GetWriteStreamRequest(proto.Message): @@ -688,4 +700,37 @@ class StorageErrorCode(proto.Enum): ) +class RowError(proto.Message): + r"""The message that presents row level error info in a request. + + Attributes: + index (int): + Index of the malformed row in the request. + code (google.cloud.bigquery_storage_v1.types.RowError.RowErrorCode): + Structured error reason for a row error. + message (str): + Description of the issue encountered when + processing the row.
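Since row_errors and the RowError message introduced above are the user-visible surface of the new feature, here is a hedged sketch of consuming them; it assumes response is an AppendRowsResponse obtained from an append_rows call:

```python
# Hedged sketch: reporting the row-level errors carried on an
# AppendRowsResponse via the row_errors field added above.
from google.cloud.bigquery_storage_v1 import types

def report_row_errors(response: types.AppendRowsResponse) -> None:
    # When an append fails because of corrupted rows, nothing is written;
    # each RowError pinpoints a bad row so the caller can drop it and retry.
    for err in response.row_errors:
        code_name = types.RowError.RowErrorCode(err.code).name
        print(f"row {err.index}: {code_name}: {err.message}")
```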
+ """ + + class RowErrorCode(proto.Enum): + r"""Error code for ``RowError``.""" + ROW_ERROR_CODE_UNSPECIFIED = 0 + FIELDS_ERROR = 1 + + index = proto.Field( + proto.INT64, + number=1, + ) + code = proto.Field( + proto.ENUM, + number=2, + enum=RowErrorCode, + ) + message = proto.Field( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_storage_v1/types/stream.py b/google/cloud/bigquery_storage_v1/types/stream.py index ae8b27e2..3b5c6b9e 100644 --- a/google/cloud/bigquery_storage_v1/types/stream.py +++ b/google/cloud/bigquery_storage_v1/types/stream.py @@ -60,6 +60,7 @@ class ReadSession(proto.Message): assigned and currently cannot be specified or updated. data_format (google.cloud.bigquery_storage_v1.types.DataFormat): Immutable. Data format of the output data. + DATA_FORMAT_UNSPECIFIED not supported. avro_schema (google.cloud.bigquery_storage_v1.types.AvroSchema): Output only. Avro schema. diff --git a/google/cloud/bigquery_storage_v1/types/table.py b/google/cloud/bigquery_storage_v1/types/table.py index 65c47f01..006ac1c5 100644 --- a/google/cloud/bigquery_storage_v1/types/table.py +++ b/google/cloud/bigquery_storage_v1/types/table.py @@ -26,7 +26,9 @@ class TableSchema(proto.Message): - r"""Schema of a table. + r"""Schema of a table. This schema is a subset of + google.cloud.bigquery.v2.TableSchema containing information + necessary to generate valid message to write to BigQuery. Attributes: fields (Sequence[google.cloud.bigquery_storage_v1.types.TableFieldSchema]): diff --git a/google/cloud/bigquery_storage_v1/writer.py b/google/cloud/bigquery_storage_v1/writer.py index 2268debc..fbcdfe36 100644 --- a/google/cloud/bigquery_storage_v1/writer.py +++ b/google/cloud/bigquery_storage_v1/writer.py @@ -182,21 +182,35 @@ def _open( # ValueError: Can not send() on an RPC that has never been open()ed. # # when they try to send a request. - while not self._rpc.is_active and self._consumer.is_active: - # Avoid 100% CPU while waiting for RPC to be ready. - time.sleep(_WRITE_OPEN_INTERVAL) - - # TODO: Check retry.deadline instead of (per-request) timeout. - # Blocked by - # https://github.com/googleapis/python-api-core/issues/262 - if timeout is None: - continue - current_time = time.monotonic() - if current_time - start_time > timeout: - break + try: + while not self._rpc.is_active and self._consumer.is_active: + # Avoid 100% CPU while waiting for RPC to be ready. + time.sleep(_WRITE_OPEN_INTERVAL) + + # TODO: Check retry.deadline instead of (per-request) timeout. + # Blocked by + # https://github.com/googleapis/python-api-core/issues/262 + if timeout is None: + continue + current_time = time.monotonic() + if current_time - start_time > timeout: + break + except AttributeError: + # Handle the AttributeError which can occur if the stream is + # unable to be opened. In that case, self._rpc or self._consumer + # may be None. + pass + + try: + is_consumer_active = self._consumer.is_active + except AttributeError: + # Handle the AttributeError which can occur if the stream is + # unable to be opened. In that case, self._consumer + # may be None. + is_consumer_active = False # Something went wrong when opening the RPC. - if not self._consumer.is_active: + if not is_consumer_active: # TODO: Share the exception from _rpc.open(). 
Blocked by # https://github.com/googleapis/python-api-core/issues/268 request_exception = exceptions.Unknown( diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py index f1d57d3f..bbd30c19 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/client.py @@ -475,6 +475,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_read_session( diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py index 4734869d..6433df14 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/base.py @@ -58,6 +58,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -85,11 +86,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -110,6 +106,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -122,6 +123,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py index 03e2c4bc..794d4137 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py index 2c987b30..2a42b07d 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_read/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py index 70562b3d..ab1520c7 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/client.py @@ -461,6 +461,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_write_stream( diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py index 86e7efa6..57067ceb 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/base.py @@ -59,6 +59,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -86,11 +87,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -111,6 +107,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -123,6 +124,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py index a21f17ca..eb65380c 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc.py @@ -60,6 +60,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -155,6 +156,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py index a9638020..e8a4ed82 100644 --- a/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_storage_v1beta2/services/big_query_write/transports/grpc_asyncio.py @@ -105,6 +105,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -200,6 +201,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/google/cloud/bigquery_storage_v1beta2/writer.py b/google/cloud/bigquery_storage_v1beta2/writer.py index 5cf101ba..20019afe 100644 --- a/google/cloud/bigquery_storage_v1beta2/writer.py +++ b/google/cloud/bigquery_storage_v1beta2/writer.py @@ -182,21 +182,35 @@ def _open( # ValueError: Can not send() on an RPC that has never been open()ed. # # when they try to send a request. - while not self._rpc.is_active and self._consumer.is_active: - # Avoid 100% CPU while waiting for RPC to be ready. - time.sleep(_WRITE_OPEN_INTERVAL) - - # TODO: Check retry.deadline instead of (per-request) timeout. - # Blocked by - # https://github.com/googleapis/python-api-core/issues/262 - if timeout is None: - continue - current_time = time.monotonic() - if current_time - start_time > timeout: - break + try: + while not self._rpc.is_active and self._consumer.is_active: + # Avoid 100% CPU while waiting for RPC to be ready. + time.sleep(_WRITE_OPEN_INTERVAL) + + # TODO: Check retry.deadline instead of (per-request) timeout. + # Blocked by + # https://github.com/googleapis/python-api-core/issues/262 + if timeout is None: + continue + current_time = time.monotonic() + if current_time - start_time > timeout: + break + except AttributeError: + # Handle the AttributeError which can occur if the stream is + # unable to be opened. In that case, self._rpc or self._consumer + # may be None. + pass + + try: + is_consumer_active = self._consumer.is_active + except AttributeError: + # Handle the AttributeError which can occur if the stream is + # unable to be opened. In that case, self._consumer + # may be None. + is_consumer_active = False # Something went wrong when opening the RPC. - if not self._consumer.is_active: + if not is_consumer_active: # TODO: Share the exception from _rpc.open(). 
Blocked by # https://github.com/googleapis/python-api-core/issues/268 request_exception = exceptions.Unknown( diff --git a/noxfile.py b/noxfile.py index ebf5ac33..fe6e3b98 100644 --- a/noxfile.py +++ b/noxfile.py @@ -19,6 +19,7 @@ from __future__ import absolute_import import os import pathlib +import re import shutil import warnings @@ -30,7 +31,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -335,3 +336,90 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + if os.path.exists("samples/snippets/requirements.txt"): + session.install("-r", "samples/snippets/requirements.txt") + + if os.path.exists("samples/snippets/requirements-test.txt"): + session.install("-r", "samples/snippets/requirements-test.txt") + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): + session.run("py.test", "tests/system") + + snippets_test_path = os.path.join("samples", "snippets") + + # Only run samples tests if found. + if os.path.exists(snippets_test_path): + session.run("py.test", "samples/snippets") diff --git a/samples/quickstart/noxfile.py b/samples/quickstart/noxfile.py index a40410b5..29b5bc85 100644 --- a/samples/quickstart/noxfile.py +++ b/samples/quickstart/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. 
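For callers, the AttributeError guards added to both writer modules above mean that a stream which never opens now surfaces through the existing google.api_core.exceptions.Unknown path instead of crashing with an unhandled AttributeError. A hedged sketch of the caller-side effect; write_client, request_template, and request are assumed to already exist:

```python
# Hedged sketch: with the guards above, a stream that fails to open raises
# exceptions.Unknown from send() rather than leaking an AttributeError.
from google.api_core import exceptions
from google.cloud.bigquery_storage_v1 import writer

stream = writer.AppendRowsStream(write_client, request_template)
try:
    future = stream.send(request)  # opens the RPC on first send
except exceptions.Unknown as exc:
    # The RPC could not be opened; log and decide whether to retry.
    print(f"append stream failed to open: {exc}")
```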
-ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/samples/snippets/customer_record_pb2.py b/samples/snippets/customer_record_pb2.py index 14201ea9..d797784b 100644 --- a/samples/snippets/customer_record_pb2.py +++ b/samples/snippets/customer_record_pb2.py @@ -1,10 +1,9 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: customer_record.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database @@ -14,86 +13,26 @@ _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor.FileDescriptor( - name="customer_record.proto", - package="", - syntax="proto2", - serialized_options=None, - serialized_pb=_b( - '\n\x15\x63ustomer_record.proto"8\n\x0e\x43ustomerRecord\x12\x15\n\rcustomer_name\x18\x01 \x01(\t\x12\x0f\n\x07row_num\x18\x02 \x02(\x03' - ), -) - - -_CUSTOMERRECORD = _descriptor.Descriptor( - name="CustomerRecord", - full_name="CustomerRecord", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="customer_name", - full_name="CustomerRecord.customer_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="row_num", - full_name="CustomerRecord.row_num", - index=1, - number=2, - type=3, - cpp_type=2, - label=2, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=25, - serialized_end=81, +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x15\x63ustomer_record.proto"8\n\x0e\x43ustomerRecord\x12\x15\n\rcustomer_name\x18\x01 \x01(\t\x12\x0f\n\x07row_num\x18\x02 \x02(\x03' ) -DESCRIPTOR.message_types_by_name["CustomerRecord"] = _CUSTOMERRECORD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) +_CUSTOMERRECORD = DESCRIPTOR.message_types_by_name["CustomerRecord"] CustomerRecord = _reflection.GeneratedProtocolMessageType( "CustomerRecord", (_message.Message,), - dict( - DESCRIPTOR=_CUSTOMERRECORD, - __module__="customer_record_pb2" + { + "DESCRIPTOR": _CUSTOMERRECORD, + "__module__": "customer_record_pb2" # @@protoc_insertion_point(class_scope:CustomerRecord) - ), + }, ) _sym_db.RegisterMessage(CustomerRecord) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _CUSTOMERRECORD._serialized_start = 25 + _CUSTOMERRECORD._serialized_end = 81 # @@protoc_insertion_point(module_scope) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index a40410b5..29b5bc85 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ 
-89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/samples/snippets/sample_data_pb2.py b/samples/snippets/sample_data_pb2.py index b1478016..85106843 100644 --- a/samples/snippets/sample_data_pb2.py +++ b/samples/snippets/sample_data_pb2.py @@ -3,6 +3,7 @@ # source: sample_data.proto """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database @@ -12,389 +13,13 @@ _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor.FileDescriptor( - name="sample_data.proto", - package="", - syntax="proto2", - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\x11sample_data.proto"\xa9\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12\x0f\n\x07row_num\x18\x10 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03', +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x11sample_data.proto"\xa9\x03\n\nSampleData\x12\x10\n\x08\x62ool_col\x18\x01 \x01(\x08\x12\x11\n\tbytes_col\x18\x02 \x01(\x0c\x12\x13\n\x0b\x66loat64_col\x18\x03 \x01(\x01\x12\x11\n\tint64_col\x18\x04 \x01(\x03\x12\x12\n\nstring_col\x18\x05 \x01(\t\x12\x10\n\x08\x64\x61te_col\x18\x06 \x01(\x05\x12\x14\n\x0c\x64\x61tetime_col\x18\x07 \x01(\t\x12\x15\n\rgeography_col\x18\x08 \x01(\t\x12\x13\n\x0bnumeric_col\x18\t \x01(\t\x12\x16\n\x0e\x62ignumeric_col\x18\n \x01(\t\x12\x10\n\x08time_col\x18\x0b \x01(\t\x12\x15\n\rtimestamp_col\x18\x0c \x01(\x03\x12\x12\n\nint64_list\x18\r \x03(\x03\x12,\n\nstruct_col\x18\x0e \x01(\x0b\x32\x18.SampleData.SampleStruct\x12-\n\x0bstruct_list\x18\x0f \x03(\x0b\x32\x18.SampleData.SampleStruct\x12\x0f\n\x07row_num\x18\x10 \x02(\x03\x1a#\n\x0cSampleStruct\x12\x13\n\x0bsub_int_col\x18\x01 \x01(\x03' ) -_SAMPLEDATA_SAMPLESTRUCT = _descriptor.Descriptor( - name="SampleStruct", - full_name="SampleData.SampleStruct", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="sub_int_col", - full_name="SampleData.SampleStruct.sub_int_col", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - 
extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=412, - serialized_end=447, -) - -_SAMPLEDATA = _descriptor.Descriptor( - name="SampleData", - full_name="SampleData", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="bool_col", - full_name="SampleData.bool_col", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="bytes_col", - full_name="SampleData.bytes_col", - index=1, - number=2, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="float64_col", - full_name="SampleData.float64_col", - index=2, - number=3, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="int64_col", - full_name="SampleData.int64_col", - index=3, - number=4, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="string_col", - full_name="SampleData.string_col", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="date_col", - full_name="SampleData.date_col", - index=5, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="datetime_col", - full_name="SampleData.datetime_col", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="geography_col", - full_name="SampleData.geography_col", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="numeric_col", - full_name="SampleData.numeric_col", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="bignumeric_col", - full_name="SampleData.bignumeric_col", - index=9, - number=10, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time_col", - full_name="SampleData.time_col", - index=10, - number=11, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="timestamp_col", - full_name="SampleData.timestamp_col", - index=11, - number=12, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="int64_list", - full_name="SampleData.int64_list", - index=12, - number=13, - type=3, - cpp_type=2, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="struct_col", - full_name="SampleData.struct_col", - index=13, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="struct_list", - full_name="SampleData.struct_list", - index=14, - number=15, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="row_num", - full_name="SampleData.row_num", - index=15, - number=16, - type=3, - cpp_type=2, - label=2, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[ - _SAMPLEDATA_SAMPLESTRUCT, - ], - enum_types=[], - serialized_options=None, - 
is_extendable=False, - syntax="proto2", - extension_ranges=[], - oneofs=[], - serialized_start=22, - serialized_end=447, -) - -_SAMPLEDATA_SAMPLESTRUCT.containing_type = _SAMPLEDATA -_SAMPLEDATA.fields_by_name["struct_col"].message_type = _SAMPLEDATA_SAMPLESTRUCT -_SAMPLEDATA.fields_by_name["struct_list"].message_type = _SAMPLEDATA_SAMPLESTRUCT -DESCRIPTOR.message_types_by_name["SampleData"] = _SAMPLEDATA -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - +_SAMPLEDATA = DESCRIPTOR.message_types_by_name["SampleData"] +_SAMPLEDATA_SAMPLESTRUCT = _SAMPLEDATA.nested_types_by_name["SampleStruct"] SampleData = _reflection.GeneratedProtocolMessageType( "SampleData", (_message.Message,), @@ -416,5 +41,11 @@ _sym_db.RegisterMessage(SampleData) _sym_db.RegisterMessage(SampleData.SampleStruct) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _SAMPLEDATA._serialized_start = 22 + _SAMPLEDATA._serialized_end = 447 + _SAMPLEDATA_SAMPLESTRUCT._serialized_start = 412 + _SAMPLEDATA_SAMPLESTRUCT._serialized_end = 447 # @@protoc_insertion_point(module_scope) diff --git a/samples/to_dataframe/noxfile.py b/samples/to_dataframe/noxfile.py index a40410b5..29b5bc85 100644 --- a/samples/to_dataframe/noxfile.py +++ b/samples/to_dataframe/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/samples/to_dataframe/requirements.txt b/samples/to_dataframe/requirements.txt index 3b4800a8..99a91731 100644 --- a/samples/to_dataframe/requirements.txt +++ b/samples/to_dataframe/requirements.txt @@ -4,7 +4,7 @@ google-cloud-bigquery==3.1.0 pyarrow==8.0.0 ipython===7.31.1; python_version == '3.7' ipython===8.0.1; python_version == '3.8' -ipython==8.3.0; python_version >= '3.9' +ipython==8.4.0; python_version >= '3.9' pandas===1.3.5; python_version == '3.7' pandas==1.4.2; python_version > '3.7' tqdm==4.64.0 diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst index 275d6498..6f069c6c 100644 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. 
code-block:: bash diff --git a/setup.py b/setup.py index b152a3bc..cd0de033 100644 --- a/setup.py +++ b/setup.py @@ -21,13 +21,10 @@ name = "google-cloud-bigquery-storage" description = "BigQuery Storage API API client library" -version = "2.13.2" +version = "2.14.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "google-api-core[grpc] >= 2.8.0, <3.0.0dev", "proto-plus >= 1.18.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", ] @@ -69,7 +66,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -82,7 +78,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=3.6", + python_requires=">=3.7", scripts=["scripts/fixup_bigquery_storage_v1_keywords.py"], include_package_data=True, zip_safe=False, diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt deleted file mode 100644 index 4493c73b..00000000 --- a/testing/constraints-3.6.txt +++ /dev/null @@ -1,14 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. -# -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -google-api-core==1.31.5 -proto-plus==1.18.0 -libcst==0.2.5 -fastavro==0.21.2 -pandas==0.21.1 -pyarrow==0.15.0 -protobuf==3.19.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 0f17bf67..226465ad 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.31.5 +google-api-core==2.8.0 proto-plus==1.18.0 libcst==0.2.5 fastavro==0.21.2 diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py index 5e3512e0..08157e10 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_read.py @@ -224,6 +224,7 @@ def test_big_query_read_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -241,6 +242,7 @@ def test_big_query_read_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -258,6 +260,7 @@ def test_big_query_read_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -287,6 +290,25 @@ def test_big_query_read_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + 
api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -352,6 +374,7 @@ def test_big_query_read_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -386,6 +409,7 @@ def test_big_query_read_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -408,6 +432,7 @@ def test_big_query_read_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -518,6 +543,7 @@ def test_big_query_read_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -556,6 +582,7 @@ def test_big_query_read_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -574,6 +601,7 @@ def test_big_query_read_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -612,6 +640,7 @@ def test_big_query_read_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
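The api_audience=None assertions threaded through the client-option tests above come from regenerating these clients with GAPIC support for audience overrides; the same PR raises the google-api-core floor to 2.8.0, the release that added api_audience to ClientOptions. For orientation, a minimal sketch of the new option from user code (the audience URL below is a placeholder for illustration, not a value the library requires):

    from google.api_core.client_options import ClientOptions
    from google.cloud import bigquery_storage_v1

    # An explicit audience overrides the token audience the transport would
    # otherwise derive from its endpoint.
    options = ClientOptions(api_audience="https://bigquerystorage.googleapis.com")

    # Assumes Application Default Credentials are available at runtime.
    client = bigquery_storage_v1.BigQueryReadClient(client_options=options)

Leaving the option unset keeps the previous behavior, which is exactly what the api_audience=None assertions above pin down.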
@@ -1547,6 +1576,28 @@ def test_big_query_read_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BigQueryReadGrpcTransport, + transports.BigQueryReadGrpcAsyncIOTransport, + ], +) +def test_big_query_read_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2062,4 +2113,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py index 7f666bb2..e4dd0336 100644 --- a/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1/test_big_query_write.py @@ -234,6 +234,7 @@ def test_big_query_write_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -251,6 +252,7 @@ def test_big_query_write_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -268,6 +270,7 @@ def test_big_query_write_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -297,6 +300,25 @@ def test_big_query_write_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -364,6 +386,7 @@ def test_big_query_write_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -398,6 +421,7 @@ def test_big_query_write_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -420,6 +444,7 @@ def test_big_query_write_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -534,6 +559,7 @@ def test_big_query_write_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -572,6 +598,7 @@ def test_big_query_write_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -592,6 +619,7 @@ def test_big_query_write_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -630,6 +658,7 @@ def test_big_query_write_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -2187,6 +2216,28 @@ def test_big_query_write_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BigQueryWriteGrpcTransport, + transports.BigQueryWriteGrpcAsyncIOTransport, + ], +) +def test_big_query_write_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2690,4 +2741,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py index 89976b2e..f80b5ba4 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_read.py @@ -226,6 +226,7 @@ def test_big_query_read_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -243,6 +244,7 @@ def test_big_query_read_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -260,6 +262,7 @@ def test_big_query_read_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, 
) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -289,6 +292,25 @@ def test_big_query_read_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -354,6 +376,7 @@ def test_big_query_read_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -388,6 +411,7 @@ def test_big_query_read_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -410,6 +434,7 @@ def test_big_query_read_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -520,6 +545,7 @@ def test_big_query_read_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -558,6 +584,7 @@ def test_big_query_read_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -576,6 +603,7 @@ def test_big_query_read_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -614,6 +642,7 @@ def test_big_query_read_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
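The test_big_query_read_transport_auth_gdch_credentials and test_big_query_write_transport_auth_gdch_credentials cases added throughout this diff all encode the same rule: with GDC-H credentials, an explicit api_audience wins, and when none is configured the transport host doubles as the audience. Written out as a plain function (a sketch of the expectation table in the tests, not the transport's actual implementation):

    from typing import Optional

    def resolve_gdch_audience(host: str, api_audience: Optional[str]) -> str:
        # Mirrors the tests' zip(api_audience_tests, api_audience_expect):
        # None falls back to the host, anything else is used as given.
        return api_audience if api_audience is not None else host

    assert resolve_gdch_audience("https://language.com", None) == "https://language.com"
    assert resolve_gdch_audience("https://language.com", "https://language2.com") == "https://language2.com"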
@@ -1541,6 +1570,28 @@ def test_big_query_read_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BigQueryReadGrpcTransport, + transports.BigQueryReadGrpcAsyncIOTransport, + ], +) +def test_big_query_read_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2056,4 +2107,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py index 8d2ab08b..9efb828b 100644 --- a/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py +++ b/tests/unit/gapic/bigquery_storage_v1beta2/test_big_query_write.py @@ -234,6 +234,7 @@ def test_big_query_write_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -251,6 +252,7 @@ def test_big_query_write_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -268,6 +270,7 @@ def test_big_query_write_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -297,6 +300,25 @@ def test_big_query_write_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -364,6 +386,7 @@ def test_big_query_write_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -398,6 +421,7 @@ def test_big_query_write_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -420,6 +444,7 @@ def test_big_query_write_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -534,6 +559,7 @@ def test_big_query_write_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -572,6 +598,7 @@ def test_big_query_write_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -592,6 +619,7 @@ def test_big_query_write_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -630,6 +658,7 @@ def test_big_query_write_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. @@ -2179,6 +2208,28 @@ def test_big_query_write_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.BigQueryWriteGrpcTransport, + transports.BigQueryWriteGrpcAsyncIOTransport, + ], +) +def test_big_query_write_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -2682,4 +2733,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, )
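One note on the samples/snippets/sample_data_pb2.py rewrite near the top of this diff: the module was regenerated with a newer protoc, which replaces the hand-assembled Descriptor/FieldDescriptor graph with a single descriptor_pool.Default().AddSerializedFile(...) call (every field the old code spelled out is still present, just encoded in the serialized file descriptor), and the trailing _USE_C_DESCRIPTORS block only backfills the serialized byte offsets when the pure-Python descriptor implementation is in use. Message usage is unchanged; a quick smoke test, assuming it is run from samples/snippets so the module imports directly:

    import sample_data_pb2

    # row_num is the lone proto2 `required` field, so it must be set
    # before serializing.
    row = sample_data_pb2.SampleData(row_num=1, string_col="hello")
    row.struct_col.sub_int_col = 42

    payload = row.SerializeToString()
    parsed = sample_data_pb2.SampleData.FromString(payload)
    assert parsed.string_col == "hello"
    assert parsed.struct_col.sub_int_col == 42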