diff --git a/.flake8 b/.flake8 index 29227d4..2e43874 100644 --- a/.flake8 +++ b/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 44c78f7..757c9dc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 0000000..311ebbb --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml new file mode 100644 index 0000000..41bff0b --- /dev/null +++ b/.github/auto-label.yaml @@ -0,0 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +requestsize: + enabled: true diff --git a/.github/release-please.yml b/.github/release-please.yml index 466597e..6def37a 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,2 +1,8 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 4e1b1fb..238b87b 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 62eb5a7..46d2371 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 diff --git a/CHANGELOG.md b/CHANGELOG.md index aab6159..f80381a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog -### [1.4.1](https://github.com/googleapis/python-data-fusion/compare/v1.4.0...v1.4.1) (2022-03-05) +## [1.4.2](https://github.com/googleapis/python-data-fusion/compare/v1.4.1...v1.4.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#112](https://github.com/googleapis/python-data-fusion/issues/112)) ([adc00c1](https://github.com/googleapis/python-data-fusion/commit/adc00c1c7a75d1d06cef8e2a841353c0b7365457)) + + +### Documentation + +* fix changelog header to consistent size ([#113](https://github.com/googleapis/python-data-fusion/issues/113)) ([2ec8e36](https://github.com/googleapis/python-data-fusion/commit/2ec8e36dcfe1acacf0b7e32f06fcadad9029cb3f)) + +## [1.4.1](https://github.com/googleapis/python-data-fusion/compare/v1.4.0...v1.4.1) (2022-03-05) ### Bug Fixes @@ -26,7 +38,7 @@ * update definitions for cloud/datafusion/v1 and cloud/datafusion/v1beta1 ([#58](https://github.com/googleapis/python-data-fusion/issues/58)) ([6b38819](https://github.com/googleapis/python-data-fusion/commit/6b38819f26fb72dc67ac2a4dda1c543d91b7f835)) -### [1.2.1](https://www.github.com/googleapis/python-data-fusion/compare/v1.2.0...v1.2.1) (2021-11-01) +## [1.2.1](https://www.github.com/googleapis/python-data-fusion/compare/v1.2.0...v1.2.1) (2021-11-01) ### Bug Fixes @@ -53,14 +65,14 @@ * add context manager support in client ([#36](https://www.github.com/googleapis/python-data-fusion/issues/36)) ([3c238d5](https://www.github.com/googleapis/python-data-fusion/commit/3c238d5d26f219dd107f09dcec3fc09977d64760)) -### [1.0.2](https://www.github.com/googleapis/python-data-fusion/compare/v1.0.1...v1.0.2) (2021-10-04) +## [1.0.2](https://www.github.com/googleapis/python-data-fusion/compare/v1.0.1...v1.0.2) (2021-10-04) ### Bug Fixes * improper types in pagers generation ([b278e83](https://www.github.com/googleapis/python-data-fusion/commit/b278e83f4a087ac21fd02eefc5f79e8c02abcfb5)) -### [1.0.1](https://www.github.com/googleapis/python-data-fusion/compare/v1.0.0...v1.0.1) (2021-09-24) +## [1.0.1](https://www.github.com/googleapis/python-data-fusion/compare/v1.0.0...v1.0.1) (2021-09-24) ### Bug Fixes @@ -79,7 +91,7 @@ * migrate to main branch ([#20](https://www.github.com/googleapis/python-data-fusion/issues/20)) 
([7edab48](https://www.github.com/googleapis/python-data-fusion/commit/7edab48370aeb6194f864bc2d402b8ffa7761a51)) -### [0.1.2](https://www.github.com/googleapis/python-data-fusion/compare/v0.1.1...v0.1.2) (2021-07-29) +## [0.1.2](https://www.github.com/googleapis/python-data-fusion/compare/v0.1.1...v0.1.2) (2021-07-29) ### Bug Fixes @@ -96,7 +108,7 @@ * release as 0.1.2 ([#11](https://www.github.com/googleapis/python-data-fusion/issues/11)) ([6b418a0](https://www.github.com/googleapis/python-data-fusion/commit/6b418a0d333f81771a597e0a554d2bf05b31d962)) -### [0.1.1](https://www.github.com/googleapis/python-data-fusion/compare/v0.1.0...v0.1.1) (2021-07-21) +## [0.1.1](https://www.github.com/googleapis/python-data-fusion/compare/v0.1.0...v0.1.1) (2021-07-21) ### Bug Fixes diff --git a/docs/conf.py b/docs/conf.py index ad4cf63..4d55c7c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -24,9 +24,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/google/cloud/data_fusion/__init__.py b/google/cloud/data_fusion/__init__.py index 7951c90..60c0bb1 100644 --- a/google/cloud/data_fusion/__init__.py +++ b/google/cloud/data_fusion/__init__.py @@ -14,26 +14,27 @@ # limitations under the License. 
# -from google.cloud.data_fusion_v1.services.data_fusion.client import DataFusionClient from google.cloud.data_fusion_v1.services.data_fusion.async_client import ( DataFusionAsyncClient, ) - -from google.cloud.data_fusion_v1.types.datafusion import Accelerator -from google.cloud.data_fusion_v1.types.datafusion import CreateInstanceRequest -from google.cloud.data_fusion_v1.types.datafusion import CryptoKeyConfig -from google.cloud.data_fusion_v1.types.datafusion import DeleteInstanceRequest -from google.cloud.data_fusion_v1.types.datafusion import GetInstanceRequest -from google.cloud.data_fusion_v1.types.datafusion import Instance -from google.cloud.data_fusion_v1.types.datafusion import ListAvailableVersionsRequest -from google.cloud.data_fusion_v1.types.datafusion import ListAvailableVersionsResponse -from google.cloud.data_fusion_v1.types.datafusion import ListInstancesRequest -from google.cloud.data_fusion_v1.types.datafusion import ListInstancesResponse -from google.cloud.data_fusion_v1.types.datafusion import NetworkConfig -from google.cloud.data_fusion_v1.types.datafusion import OperationMetadata -from google.cloud.data_fusion_v1.types.datafusion import RestartInstanceRequest -from google.cloud.data_fusion_v1.types.datafusion import UpdateInstanceRequest -from google.cloud.data_fusion_v1.types.datafusion import Version +from google.cloud.data_fusion_v1.services.data_fusion.client import DataFusionClient +from google.cloud.data_fusion_v1.types.datafusion import ( + Accelerator, + CreateInstanceRequest, + CryptoKeyConfig, + DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListAvailableVersionsRequest, + ListAvailableVersionsResponse, + ListInstancesRequest, + ListInstancesResponse, + NetworkConfig, + OperationMetadata, + RestartInstanceRequest, + UpdateInstanceRequest, + Version, +) __all__ = ( "DataFusionClient", diff --git a/google/cloud/data_fusion_v1/__init__.py b/google/cloud/data_fusion_v1/__init__.py index 00e27ef..9a6461b 100644 --- a/google/cloud/data_fusion_v1/__init__.py +++ b/google/cloud/data_fusion_v1/__init__.py @@ -14,24 +14,24 @@ # limitations under the License. 
# -from .services.data_fusion import DataFusionClient -from .services.data_fusion import DataFusionAsyncClient - -from .types.datafusion import Accelerator -from .types.datafusion import CreateInstanceRequest -from .types.datafusion import CryptoKeyConfig -from .types.datafusion import DeleteInstanceRequest -from .types.datafusion import GetInstanceRequest -from .types.datafusion import Instance -from .types.datafusion import ListAvailableVersionsRequest -from .types.datafusion import ListAvailableVersionsResponse -from .types.datafusion import ListInstancesRequest -from .types.datafusion import ListInstancesResponse -from .types.datafusion import NetworkConfig -from .types.datafusion import OperationMetadata -from .types.datafusion import RestartInstanceRequest -from .types.datafusion import UpdateInstanceRequest -from .types.datafusion import Version +from .services.data_fusion import DataFusionAsyncClient, DataFusionClient +from .types.datafusion import ( + Accelerator, + CreateInstanceRequest, + CryptoKeyConfig, + DeleteInstanceRequest, + GetInstanceRequest, + Instance, + ListAvailableVersionsRequest, + ListAvailableVersionsResponse, + ListInstancesRequest, + ListInstancesResponse, + NetworkConfig, + OperationMetadata, + RestartInstanceRequest, + UpdateInstanceRequest, + Version, +) __all__ = ( "DataFusionAsyncClient", diff --git a/google/cloud/data_fusion_v1/services/data_fusion/__init__.py b/google/cloud/data_fusion_v1/services/data_fusion/__init__.py index 268bd96..6123139 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/__init__.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import DataFusionClient from .async_client import DataFusionAsyncClient +from .client import DataFusionClient __all__ = ( "DataFusionClient", diff --git a/google/cloud/data_fusion_v1/services/data_fusion/async_client.py b/google/cloud/data_fusion_v1/services/data_fusion/async_client.py index 669bfe3..f5e9523 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/async_client.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/async_client.py @@ -16,15 +16,15 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -33,14 +33,16 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.data_fusion_v1.services.data_fusion import pagers -from google.cloud.data_fusion_v1.types import datafusion from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataFusionTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DataFusionGrpcAsyncIOTransport + +from 
google.cloud.data_fusion_v1.services.data_fusion import pagers +from google.cloud.data_fusion_v1.types import datafusion + from .client import DataFusionClient +from .transports.base import DEFAULT_CLIENT_INFO, DataFusionTransport +from .transports.grpc_asyncio import DataFusionGrpcAsyncIOTransport class DataFusionAsyncClient: @@ -218,14 +220,13 @@ async def list_available_versions( r"""Lists possible versions for Data Fusion instances in the specified project and location. - .. code-block:: python from google.cloud import data_fusion_v1 - def sample_list_available_versions(): + async def sample_list_available_versions(): # Create a client - client = data_fusion_v1.DataFusionClient() + client = data_fusion_v1.DataFusionAsyncClient() # Initialize request argument(s) request = data_fusion_v1.ListAvailableVersionsRequest( @@ -236,7 +237,7 @@ def sample_list_available_versions(): page_result = client.list_available_versions(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -299,12 +300,20 @@ def sample_list_available_versions(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListAvailableVersionsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -321,14 +330,13 @@ async def list_instances( r"""Lists Data Fusion instances in the specified project and location. - .. code-block:: python from google.cloud import data_fusion_v1 - def sample_list_instances(): + async def sample_list_instances(): # Create a client - client = data_fusion_v1.DataFusionClient() + client = data_fusion_v1.DataFusionAsyncClient() # Initialize request argument(s) request = data_fusion_v1.ListInstancesRequest( @@ -339,7 +347,7 @@ def sample_list_instances(): page_result = client.list_instances(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -379,12 +387,20 @@ def sample_list_instances(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListInstancesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -404,9 +420,9 @@ async def get_instance( from google.cloud import data_fusion_v1 - def sample_get_instance(): + async def sample_get_instance(): # Create a client - client = data_fusion_v1.DataFusionClient() + client = data_fusion_v1.DataFusionAsyncClient() # Initialize request argument(s) request = data_fusion_v1.GetInstanceRequest( @@ -414,7 +430,7 @@ def sample_get_instance(): ) # Make the request - response = client.get_instance(request=request) + response = await client.get_instance(request=request) # Handle the response print(response) @@ -451,7 +467,12 @@ def sample_get_instance(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -470,14 +491,13 @@ async def create_instance( r"""Creates a new Data Fusion instance in the specified project and location. - .. code-block:: python from google.cloud import data_fusion_v1 - def sample_create_instance(): + async def sample_create_instance(): # Create a client - client = data_fusion_v1.DataFusionClient() + client = data_fusion_v1.DataFusionAsyncClient() # Initialize request argument(s) request = data_fusion_v1.CreateInstanceRequest( @@ -490,7 +510,7 @@ def sample_create_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -570,7 +590,12 @@ def sample_create_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -598,9 +623,9 @@ async def delete_instance( from google.cloud import data_fusion_v1 - def sample_delete_instance(): + async def sample_delete_instance(): # Create a client - client = data_fusion_v1.DataFusionClient() + client = data_fusion_v1.DataFusionAsyncClient() # Initialize request argument(s) request = data_fusion_v1.DeleteInstanceRequest( @@ -612,7 +637,7 @@ def sample_delete_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -686,7 +711,12 @@ def sample_delete_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -715,9 +745,9 @@ async def update_instance( from google.cloud import data_fusion_v1 - def sample_update_instance(): + async def sample_update_instance(): # Create a client - client = data_fusion_v1.DataFusionClient() + client = data_fusion_v1.DataFusionAsyncClient() # Initialize request argument(s) instance = data_fusion_v1.Instance() @@ -732,7 +762,7 @@ def sample_update_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -817,7 +847,12 @@ def sample_update_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( @@ -841,14 +876,13 @@ async def restart_instance( r"""Restart a single Data Fusion instance. At the end of an operation instance is fully restarted. - .. 
code-block:: python from google.cloud import data_fusion_v1 - def sample_restart_instance(): + async def sample_restart_instance(): # Create a client - client = data_fusion_v1.DataFusionClient() + client = data_fusion_v1.DataFusionAsyncClient() # Initialize request argument(s) request = data_fusion_v1.RestartInstanceRequest( @@ -860,7 +894,7 @@ def sample_restart_instance(): print("Waiting for operation to complete...") - response = operation.result() + response = await operation.result() # Handle the response print(response) @@ -902,7 +936,12 @@ def sample_restart_instance(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation_async.from_gapic( diff --git a/google/cloud/data_fusion_v1/services/data_fusion/client.py b/google/cloud/data_fusion_v1/services/data_fusion/client.py index 5705c9b..9d61124 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/client.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/client.py @@ -16,18 +16,18 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] @@ -36,12 +36,14 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore -from google.cloud.data_fusion_v1.services.data_fusion import pagers -from google.cloud.data_fusion_v1.types import datafusion from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DataFusionTransport, DEFAULT_CLIENT_INFO + +from google.cloud.data_fusion_v1.services.data_fusion import pagers +from google.cloud.data_fusion_v1.types import datafusion + +from .transports.base import DEFAULT_CLIENT_INFO, DataFusionTransport from .transports.grpc import DataFusionGrpcTransport from .transports.grpc_asyncio import DataFusionGrpcAsyncIOTransport @@ -58,7 +60,10 @@ class DataFusionClientMeta(type): _transport_registry["grpc"] = DataFusionGrpcTransport _transport_registry["grpc_asyncio"] = DataFusionGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[DataFusionTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[DataFusionTransport]: """Returns an appropriate transport class. 
Args: @@ -167,7 +172,10 @@ def transport(self) -> DataFusionTransport: @staticmethod def crypto_key_path( - project: str, location: str, key_ring: str, crypto_key: str, + project: str, + location: str, + key_ring: str, + crypto_key: str, ) -> str: """Returns a fully-qualified crypto_key string.""" return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( @@ -187,10 +195,16 @@ def parse_crypto_key_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def instance_path(project: str, location: str, instance: str,) -> str: + def instance_path( + project: str, + location: str, + instance: str, + ) -> str: """Returns a fully-qualified instance string.""" return "projects/{project}/locations/{location}/instances/{instance}".format( - project=project, location=location, instance=instance, + project=project, + location=location, + instance=instance, ) @staticmethod @@ -203,7 +217,9 @@ def parse_instance_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -216,9 +232,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -227,9 +247,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -238,9 +262,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -249,10 +277,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -438,7 +470,6 @@ def list_available_versions( r"""Lists possible versions for Data Fusion instances in the specified project and location. - .. 
code-block:: python from google.cloud import data_fusion_v1 @@ -519,12 +550,20 @@ def sample_list_available_versions(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListAvailableVersionsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -541,7 +580,6 @@ def list_instances( r"""Lists Data Fusion instances in the specified project and location. - .. code-block:: python from google.cloud import data_fusion_v1 @@ -600,12 +638,20 @@ def sample_list_instances(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListInstancesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -673,7 +719,12 @@ def sample_get_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -692,7 +743,6 @@ def create_instance( r"""Creates a new Data Fusion instance in the specified project and location. - .. code-block:: python from google.cloud import data_fusion_v1 @@ -792,7 +842,12 @@ def sample_create_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -908,7 +963,12 @@ def sample_delete_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1039,7 +1099,12 @@ def sample_update_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. response = operation.from_gapic( @@ -1063,7 +1128,6 @@ def restart_instance( r"""Restart a single Data Fusion instance. At the end of an operation instance is fully restarted. - .. code-block:: python from google.cloud import data_fusion_v1 @@ -1125,7 +1189,12 @@ def sample_restart_instance(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Wrap the response in an operation future. 
response = operation.from_gapic( diff --git a/google/cloud/data_fusion_v1/services/data_fusion/pagers.py b/google/cloud/data_fusion_v1/services/data_fusion/pagers.py index 55a7704..be08528 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/pagers.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/pagers.py @@ -18,10 +18,10 @@ AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, ) from google.cloud.data_fusion_v1.types import datafusion diff --git a/google/cloud/data_fusion_v1/services/data_fusion/transports/__init__.py b/google/cloud/data_fusion_v1/services/data_fusion/transports/__init__.py index 0e1af2b..6545eef 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/transports/__init__.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/transports/__init__.py @@ -20,7 +20,6 @@ from .grpc import DataFusionGrpcTransport from .grpc_asyncio import DataFusionGrpcAsyncIOTransport - # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[DataFusionTransport]] _transport_registry["grpc"] = DataFusionGrpcTransport diff --git a/google/cloud/data_fusion_v1/services/data_fusion/transports/base.py b/google/cloud/data_fusion_v1/services/data_fusion/transports/base.py index f9a5b4b..facf451 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/transports/base.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/transports/base.py @@ -15,19 +15,18 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, operations_v1 from google.api_core import retry as retries -from google.api_core import operations_v1 +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources from google.cloud.data_fusion_v1.types import datafusion -from google.longrunning import operations_pb2 # type: ignore try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -82,6 +81,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -128,31 +128,43 @@ def _prep_wrapped_messages(self, client_info): client_info=client_info, ), self.list_instances: gapic_v1.method.wrap_method( - self.list_instances, default_timeout=None, client_info=client_info, + self.list_instances, + default_timeout=None, + client_info=client_info, ), self.get_instance: gapic_v1.method.wrap_method( - self.get_instance, default_timeout=None, client_info=client_info, + self.get_instance, + default_timeout=None, + client_info=client_info, ), self.create_instance: gapic_v1.method.wrap_method( - self.create_instance, default_timeout=None, client_info=client_info, + self.create_instance, + default_timeout=None, + client_info=client_info, ), self.delete_instance: gapic_v1.method.wrap_method( - self.delete_instance, default_timeout=None, client_info=client_info, + self.delete_instance, + default_timeout=None, + client_info=client_info, ), self.update_instance: gapic_v1.method.wrap_method( - self.update_instance, default_timeout=None, client_info=client_info, + self.update_instance, + default_timeout=None, + client_info=client_info, ), self.restart_instance: gapic_v1.method.wrap_method( - self.restart_instance, default_timeout=None, client_info=client_info, + self.restart_instance, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() @@ -230,5 +242,9 @@ def restart_instance( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("DataFusionTransport",) diff --git a/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc.py b/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc.py index 46eedb5..cd34423 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc.py @@ -13,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import operations_v1 -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers, operations_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from google.cloud.data_fusion_v1.types import datafusion -from google.longrunning import operations_pb2 # type: ignore -from .base import DataFusionTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, DataFusionTransport class DataFusionGrpcTransport(DataFusionTransport): @@ -229,8 +227,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -439,5 +436,9 @@ def restart_instance( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("DataFusionGrpcTransport",) diff --git a/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc_asyncio.py b/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc_asyncio.py index dc25802..b43e53f 100644 --- a/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc_asyncio.py +++ b/google/cloud/data_fusion_v1/services/data_fusion/transports/grpc_asyncio.py @@ -13,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.api_core import operations_v1 +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.data_fusion_v1.types import datafusion -from google.longrunning import operations_pb2 # type: ignore -from .base import DataFusionTransport, DEFAULT_CLIENT_INFO + +from .base import DEFAULT_CLIENT_INFO, DataFusionTransport from .grpc import DataFusionGrpcTransport diff --git a/google/cloud/data_fusion_v1/types/datafusion.py b/google/cloud/data_fusion_v1/types/datafusion.py index 39028b6..5229553 100644 --- a/google/cloud/data_fusion_v1/types/datafusion.py +++ b/google/cloud/data_fusion_v1/types/datafusion.py @@ -13,11 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore - from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.datafusion.v1", @@ -66,8 +64,14 @@ class NetworkConfig(proto.Message): the customer network. 
""" - network = proto.Field(proto.STRING, number=1,) - ip_allocation = proto.Field(proto.STRING, number=2,) + network = proto.Field( + proto.STRING, + number=1, + ) + ip_allocation = proto.Field( + proto.STRING, + number=2, + ) class Version(proto.Message): @@ -98,10 +102,23 @@ class Type(proto.Enum): TYPE_PREVIEW = 1 TYPE_GENERAL_AVAILABILITY = 2 - version_number = proto.Field(proto.STRING, number=1,) - default_version = proto.Field(proto.BOOL, number=2,) - available_features = proto.RepeatedField(proto.STRING, number=3,) - type_ = proto.Field(proto.ENUM, number=4, enum=Type,) + version_number = proto.Field( + proto.STRING, + number=1, + ) + default_version = proto.Field( + proto.BOOL, + number=2, + ) + available_features = proto.RepeatedField( + proto.STRING, + number=3, + ) + type_ = proto.Field( + proto.ENUM, + number=4, + enum=Type, + ) class Accelerator(proto.Message): @@ -130,8 +147,16 @@ class State(proto.Enum): DISABLED = 2 UNKNOWN = 3 - accelerator_type = proto.Field(proto.ENUM, number=1, enum=AcceleratorType,) - state = proto.Field(proto.ENUM, number=2, enum=State,) + accelerator_type = proto.Field( + proto.ENUM, + number=1, + enum=AcceleratorType, + ) + state = proto.Field( + proto.ENUM, + number=2, + enum=State, + ) class CryptoKeyConfig(proto.Message): @@ -146,7 +171,10 @@ class CryptoKeyConfig(proto.Message): ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. """ - key_reference = proto.Field(proto.STRING, number=1,) + key_reference = proto.Field( + proto.STRING, + number=1, + ) class Instance(proto.Message): @@ -174,12 +202,12 @@ class Instance(proto.Message): Network configuration options. These are required when a private Data Fusion instance is to be created. - labels (Sequence[google.cloud.data_fusion_v1.types.Instance.LabelsEntry]): + labels (Mapping[str, str]): The resource labels for instance to use to annotate any related underlying resources such as Compute Engine VMs. The character '=' is not allowed to be used within the labels. - options (Sequence[google.cloud.data_fusion_v1.types.Instance.OptionsEntry]): + options (Mapping[str, str]): Map of additional options used to configure the behavior of Data Fusion instance. 
create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -276,42 +304,129 @@ class DisabledReason(proto.Enum): DISABLED_REASON_UNSPECIFIED = 0 KMS_KEY_ISSUE = 1 - name = proto.Field(proto.STRING, number=1,) - description = proto.Field(proto.STRING, number=2,) - type_ = proto.Field(proto.ENUM, number=3, enum=Type,) - enable_stackdriver_logging = proto.Field(proto.BOOL, number=4,) - enable_stackdriver_monitoring = proto.Field(proto.BOOL, number=5,) - private_instance = proto.Field(proto.BOOL, number=6,) - network_config = proto.Field(proto.MESSAGE, number=7, message="NetworkConfig",) - labels = proto.MapField(proto.STRING, proto.STRING, number=8,) - options = proto.MapField(proto.STRING, proto.STRING, number=9,) + name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + type_ = proto.Field( + proto.ENUM, + number=3, + enum=Type, + ) + enable_stackdriver_logging = proto.Field( + proto.BOOL, + number=4, + ) + enable_stackdriver_monitoring = proto.Field( + proto.BOOL, + number=5, + ) + private_instance = proto.Field( + proto.BOOL, + number=6, + ) + network_config = proto.Field( + proto.MESSAGE, + number=7, + message="NetworkConfig", + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) + options = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) create_time = proto.Field( - proto.MESSAGE, number=10, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, ) update_time = proto.Field( - proto.MESSAGE, number=11, message=timestamp_pb2.Timestamp, - ) - state = proto.Field(proto.ENUM, number=12, enum=State,) - state_message = proto.Field(proto.STRING, number=13,) - service_endpoint = proto.Field(proto.STRING, number=14,) - zone = proto.Field(proto.STRING, number=15,) - version = proto.Field(proto.STRING, number=16,) - service_account = proto.Field(proto.STRING, number=17,) - display_name = proto.Field(proto.STRING, number=18,) + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=12, + enum=State, + ) + state_message = proto.Field( + proto.STRING, + number=13, + ) + service_endpoint = proto.Field( + proto.STRING, + number=14, + ) + zone = proto.Field( + proto.STRING, + number=15, + ) + version = proto.Field( + proto.STRING, + number=16, + ) + service_account = proto.Field( + proto.STRING, + number=17, + ) + display_name = proto.Field( + proto.STRING, + number=18, + ) available_version = proto.RepeatedField( - proto.MESSAGE, number=19, message="Version", - ) - api_endpoint = proto.Field(proto.STRING, number=20,) - gcs_bucket = proto.Field(proto.STRING, number=21,) - accelerators = proto.RepeatedField(proto.MESSAGE, number=22, message="Accelerator",) - p4_service_account = proto.Field(proto.STRING, number=23,) - tenant_project_id = proto.Field(proto.STRING, number=24,) - dataproc_service_account = proto.Field(proto.STRING, number=25,) - enable_rbac = proto.Field(proto.BOOL, number=27,) + proto.MESSAGE, + number=19, + message="Version", + ) + api_endpoint = proto.Field( + proto.STRING, + number=20, + ) + gcs_bucket = proto.Field( + proto.STRING, + number=21, + ) + accelerators = proto.RepeatedField( + proto.MESSAGE, + number=22, + message="Accelerator", + ) + p4_service_account = proto.Field( + proto.STRING, + number=23, + ) + tenant_project_id = proto.Field( + proto.STRING, + number=24, + ) + dataproc_service_account = proto.Field( + proto.STRING, + number=25, + ) + enable_rbac = proto.Field( + 
proto.BOOL, + number=27, + ) crypto_key_config = proto.Field( - proto.MESSAGE, number=28, message="CryptoKeyConfig", + proto.MESSAGE, + number=28, + message="CryptoKeyConfig", + ) + disabled_reason = proto.RepeatedField( + proto.ENUM, + number=29, + enum=DisabledReason, ) - disabled_reason = proto.RepeatedField(proto.ENUM, number=29, enum=DisabledReason,) class ListInstancesRequest(proto.Message): @@ -337,11 +452,26 @@ class ListInstancesRequest(proto.Message): "name desc", or "" (unsorted). """ - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=4,) - order_by = proto.Field(proto.STRING, number=5,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class ListInstancesResponse(proto.Message): @@ -361,9 +491,19 @@ class ListInstancesResponse(proto.Message): def raw_page(self): return self - instances = proto.RepeatedField(proto.MESSAGE, number=1, message="Instance",) - next_page_token = proto.Field(proto.STRING, number=2,) - unreachable = proto.RepeatedField(proto.STRING, number=3,) + instances = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Instance", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) class ListAvailableVersionsRequest(proto.Message): @@ -386,10 +526,22 @@ class ListAvailableVersionsRequest(proto.Message): then response will be [6.1.2, 6.2.0] """ - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - latest_patch_only = proto.Field(proto.BOOL, number=4,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + latest_patch_only = proto.Field( + proto.BOOL, + number=4, + ) class ListAvailableVersionsResponse(proto.Message): @@ -409,9 +561,14 @@ def raw_page(self): return self available_versions = proto.RepeatedField( - proto.MESSAGE, number=1, message="Version", + proto.MESSAGE, + number=1, + message="Version", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, ) - next_page_token = proto.Field(proto.STRING, number=2,) class GetInstanceRequest(proto.Message): @@ -425,7 +582,10 @@ class GetInstanceRequest(proto.Message): projects/{project}/locations/{location}/instances/{instance}. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class CreateInstanceRequest(proto.Message): @@ -442,9 +602,19 @@ class CreateInstanceRequest(proto.Message): An instance resource. 
""" - parent = proto.Field(proto.STRING, number=1,) - instance_id = proto.Field(proto.STRING, number=2,) - instance = proto.Field(proto.MESSAGE, number=3, message="Instance",) + parent = proto.Field( + proto.STRING, + number=1, + ) + instance_id = proto.Field( + proto.STRING, + number=2, + ) + instance = proto.Field( + proto.MESSAGE, + number=3, + message="Instance", + ) class DeleteInstanceRequest(proto.Message): @@ -457,7 +627,10 @@ class DeleteInstanceRequest(proto.Message): projects/{project}/locations/{location}/instances/{instance} """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class UpdateInstanceRequest(proto.Message): @@ -482,9 +655,15 @@ class UpdateInstanceRequest(proto.Message): overwritten. """ - instance = proto.Field(proto.MESSAGE, number=1, message="Instance",) + instance = proto.Field( + proto.MESSAGE, + number=1, + message="Instance", + ) update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, ) @@ -498,7 +677,10 @@ class RestartInstanceRequest(proto.Message): projects/{project}/locations/{location}/instances/{instance} """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class OperationMetadata(proto.Message): @@ -525,7 +707,7 @@ class OperationMetadata(proto.Message): corresponding to ``Code.CANCELLED``. api_version (str): API version used to start the operation. - additional_status (Sequence[google.cloud.data_fusion_v1.types.OperationMetadata.AdditionalStatusEntry]): + additional_status (Mapping[str, str]): Map to hold any additional status info for the operation If there is an accelerator being enabled/disabled/deleted, this will be populated @@ -533,14 +715,41 @@ class OperationMetadata(proto.Message): ENABLING, DISABLING or DELETING """ - create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,) - target = proto.Field(proto.STRING, number=3,) - verb = proto.Field(proto.STRING, number=4,) - status_detail = proto.Field(proto.STRING, number=5,) - requested_cancellation = proto.Field(proto.BOOL, number=6,) - api_version = proto.Field(proto.STRING, number=7,) - additional_status = proto.MapField(proto.STRING, proto.STRING, number=8,) + create_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + target = proto.Field( + proto.STRING, + number=3, + ) + verb = proto.Field( + proto.STRING, + number=4, + ) + status_detail = proto.Field( + proto.STRING, + number=5, + ) + requested_cancellation = proto.Field( + proto.BOOL, + number=6, + ) + api_version = proto.Field( + proto.STRING, + number=7, + ) + additional_status = proto.MapField( + proto.STRING, + proto.STRING, + number=8, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 2a2001c..7c1742d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,19 +17,45 @@ # Generated by synthtool. DO NOT EDIT! 
from __future__ import absolute_import + import os import pathlib import shutil +import warnings import nox - -BLACK_VERSION = "black==19.10b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -57,7 +83,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +95,28 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) @@ -78,23 +127,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. 
session.run( @@ -118,6 +185,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -140,13 +236,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: diff --git a/samples/generated_samples/snippet_metadata_data fusion_v1.json b/samples/generated_samples/snippet_metadata_data fusion_v1.json index 0f0a0b3..2704a2e 100644 --- a/samples/generated_samples/snippet_metadata_data fusion_v1.json +++ b/samples/generated_samples/snippet_metadata_data fusion_v1.json @@ -1,16 +1,69 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.datafusion.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-data-fusion" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient", + "shortName": "DataFusionAsyncClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient.create_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.CreateInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.data_fusion_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": "datafusion_v1_generated_data_fusion_create_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_CreateInstance_async", "segments": [ { @@ -43,18 +96,62 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"datafusion_v1_generated_data_fusion_create_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionClient", + "shortName": "DataFusionClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionClient.create_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.CreateInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "CreateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.CreateInstanceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "instance", + "type": "google.cloud.data_fusion_v1.types.Instance" + }, + { + "name": "instance_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_instance" }, + "description": "Sample for CreateInstance", "file": "datafusion_v1_generated_data_fusion_create_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_CreateInstance_sync", "segments": [ { @@ -87,19 +184,55 @@ "start": 47, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_create_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient", + "shortName": "DataFusionAsyncClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient.delete_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.DeleteInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "datafusion_v1_generated_data_fusion_delete_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_DeleteInstance_async", "segments": [ { @@ -132,18 +265,54 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_delete_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionClient", + "shortName": "DataFusionClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionClient.delete_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.DeleteInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "DeleteInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.DeleteInstanceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_instance" }, + "description": "Sample for DeleteInstance", "file": "datafusion_v1_generated_data_fusion_delete_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_DeleteInstance_sync", "segments": [ { @@ -176,19 +345,51 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_delete_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient", + "shortName": "DataFusionAsyncClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient.get_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.GetInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.GetInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.data_fusion_v1.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "datafusion_v1_generated_data_fusion_get_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_GetInstance_async", "segments": [ { @@ -221,18 +422,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_get_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionClient", + "shortName": "DataFusionClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionClient.get_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.GetInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "GetInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.GetInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.data_fusion_v1.types.Instance", + "shortName": "get_instance" }, + "description": "Sample for GetInstance", "file": "datafusion_v1_generated_data_fusion_get_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_GetInstance_sync", "segments": [ { @@ -265,19 +498,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_get_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient", + "shortName": "DataFusionAsyncClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient.list_available_versions", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.ListAvailableVersions", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, 
"shortName": "ListAvailableVersions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.ListAvailableVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.data_fusion_v1.services.data_fusion.pagers.ListAvailableVersionsAsyncPager", + "shortName": "list_available_versions" }, + "description": "Sample for ListAvailableVersions", "file": "datafusion_v1_generated_data_fusion_list_available_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_ListAvailableVersions_async", "segments": [ { @@ -310,18 +579,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_list_available_versions_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionClient", + "shortName": "DataFusionClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionClient.list_available_versions", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.ListAvailableVersions", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "ListAvailableVersions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.ListAvailableVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.data_fusion_v1.services.data_fusion.pagers.ListAvailableVersionsPager", + "shortName": "list_available_versions" }, + "description": "Sample for ListAvailableVersions", "file": "datafusion_v1_generated_data_fusion_list_available_versions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_ListAvailableVersions_sync", "segments": [ { @@ -354,19 +659,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_list_available_versions_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient", + "shortName": "DataFusionAsyncClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient.list_instances", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.ListInstances", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.ListInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.data_fusion_v1.services.data_fusion.pagers.ListInstancesAsyncPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "datafusion_v1_generated_data_fusion_list_instances_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"datafusion_v1_generated_DataFusion_ListInstances_async", "segments": [ { @@ -399,18 +736,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_list_instances_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionClient", + "shortName": "DataFusionClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionClient.list_instances", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.ListInstances", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "ListInstances" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.ListInstancesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.data_fusion_v1.services.data_fusion.pagers.ListInstancesPager", + "shortName": "list_instances" }, + "description": "Sample for ListInstances", "file": "datafusion_v1_generated_data_fusion_list_instances_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_ListInstances_sync", "segments": [ { @@ -443,19 +812,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_list_instances_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient", + "shortName": "DataFusionAsyncClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient.restart_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.RestartInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "RestartInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.RestartInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_instance" }, + "description": "Sample for RestartInstance", "file": "datafusion_v1_generated_data_fusion_restart_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_RestartInstance_async", "segments": [ { @@ -488,18 +889,50 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_restart_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionClient", + "shortName": "DataFusionClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionClient.restart_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.RestartInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "RestartInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.RestartInstanceRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_instance" }, + "description": "Sample for RestartInstance", "file": "datafusion_v1_generated_data_fusion_restart_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_RestartInstance_sync", "segments": [ { @@ -532,19 +965,59 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_restart_instance_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient", + "shortName": "DataFusionAsyncClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionAsyncClient.update_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.UpdateInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.data_fusion_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "datafusion_v1_generated_data_fusion_update_instance_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_UpdateInstance_async", "segments": [ { @@ -577,18 +1050,58 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_update_instance_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.data_fusion_v1.DataFusionClient", + "shortName": "DataFusionClient" + }, + "fullName": "google.cloud.data_fusion_v1.DataFusionClient.update_instance", "method": { + "fullName": "google.cloud.datafusion.v1.DataFusion.UpdateInstance", "service": { + "fullName": "google.cloud.datafusion.v1.DataFusion", "shortName": "DataFusion" }, "shortName": "UpdateInstance" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.data_fusion_v1.types.UpdateInstanceRequest" + }, + { + "name": "instance", + "type": "google.cloud.data_fusion_v1.types.Instance" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_instance" }, + "description": "Sample for UpdateInstance", "file": "datafusion_v1_generated_data_fusion_update_instance_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "datafusion_v1_generated_DataFusion_UpdateInstance_sync", "segments": [ { @@ -621,7 +1134,8 @@ "start": 49, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "datafusion_v1_generated_data_fusion_update_instance_sync.py" } ] } diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e..91b5967 100644 
--- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') diff --git a/setup.py b/setup.py index 63267ac..7094e46 100644 --- a/setup.py +++ b/setup.py @@ -19,10 +19,9 @@ import setuptools - name = "google-cloud-data-fusion" description = "Cloud Data Fusion API client library" -version = "1.4.1" +version = "1.4.2" release_status = "Development Status :: 5 - Production/Stable" url = "https://github.com/googleapis/python-data-fusion" dependencies = [ @@ -30,7 +29,8 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", - "proto-plus >= 1.15.0", + "proto-plus >= 1.15.0, <2.0.0dev", + "protobuf >= 3.19.0, <4.0.0dev", ] package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index be5a64f..786e637 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -6,3 +6,4 @@ # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.31.5 proto-plus==1.15.0 +protobuf==3.19.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index e69de29..786e637 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.31.5 +proto-plus==1.15.0 +protobuf==3.19.0 diff --git a/tests/unit/gapic/data_fusion_v1/test_data_fusion.py b/tests/unit/gapic/data_fusion_v1/test_data_fusion.py index 27ed703..8c08f75 100644 --- a/tests/unit/gapic/data_fusion_v1/test_data_fusion.py +++ b/tests/unit/gapic/data_fusion_v1/test_data_fusion.py @@ -14,37 +14,47 @@ # limitations under the License. 
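The rewritten import block in the hunk below opens with a compatibility shim: on Python 3.8+ the tests use the standard-library unittest.mock (which provides AsyncMock), while older interpreters fall back to the external mock package. A condensed sketch of the same pattern, with a small illustrative usage line that is not part of this change:

# Condensed sketch of the compatibility import added below; AsyncMock only
# exists in unittest.mock from Python 3.8, so older interpreters fall back
# to the third-party "mock" package.
try:
    from unittest import mock
    from unittest.mock import AsyncMock  # noqa: F401  (Python 3.8+)
except ImportError:  # Python < 3.8
    import mock

import asyncio

# Illustrative use only: an awaitable mock suitable for faking async transport calls.
stub = mock.AsyncMock(return_value=42)
assert asyncio.run(stub()) == 42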
# import os -import mock -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock +import math +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import future -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import operation from google.api_core import operation_async # type: ignore -from google.api_core import operations_v1 -from google.api_core import path_template +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.data_fusion_v1.services.data_fusion import DataFusionAsyncClient -from google.cloud.data_fusion_v1.services.data_fusion import DataFusionClient -from google.cloud.data_fusion_v1.services.data_fusion import pagers -from google.cloud.data_fusion_v1.services.data_fusion import transports -from google.cloud.data_fusion_v1.types import datafusion from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.data_fusion_v1.services.data_fusion import ( + DataFusionAsyncClient, + DataFusionClient, + pagers, + transports, +) +from google.cloud.data_fusion_v1.types import datafusion def client_cert_source_callback(): @@ -88,19 +98,25 @@ def test__get_default_mtls_endpoint(): assert DataFusionClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [DataFusionClient, DataFusionAsyncClient,]) -def test_data_fusion_client_from_service_account_info(client_class): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataFusionClient, "grpc"), + (DataFusionAsyncClient, "grpc_asyncio"), + ], +) +def test_data_fusion_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datafusion.googleapis.com:443" + assert client.transport._host == ("datafusion.googleapis.com:443") @pytest.mark.parametrize( @@ -128,22 +144,32 @@ def test_data_fusion_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [DataFusionClient, DataFusionAsyncClient,]) -def test_data_fusion_client_from_service_account_file(client_class): +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataFusionClient, "grpc"), + (DataFusionAsyncClient, "grpc_asyncio"), + ], +) +def 
test_data_fusion_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "datafusion.googleapis.com:443" + assert client.transport._host == ("datafusion.googleapis.com:443") def test_data_fusion_client_get_transport_class(): @@ -483,7 +509,9 @@ def test_data_fusion_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -612,11 +640,16 @@ def test_data_fusion_client_create_channel_credentials_file( @pytest.mark.parametrize( - "request_type", [datafusion.ListAvailableVersionsRequest, dict,] + "request_type", + [ + datafusion.ListAvailableVersionsRequest, + dict, + ], ) def test_list_available_versions(request_type, transport: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -647,7 +680,8 @@ def test_list_available_versions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -666,7 +700,8 @@ async def test_list_available_versions_async( request_type=datafusion.ListAvailableVersionsRequest, ): client = DataFusionAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -701,13 +736,15 @@ async def test_list_available_versions_async_from_dict(): def test_list_available_versions_field_headers(): - client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datafusion.ListAvailableVersionsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -723,18 +760,23 @@ def test_list_available_versions_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_available_versions_field_headers_async(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datafusion.ListAvailableVersionsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -752,11 +794,16 @@ async def test_list_available_versions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_available_versions_flattened(): - client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -766,7 +813,9 @@ def test_list_available_versions_flattened(): call.return_value = datafusion.ListAvailableVersionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_available_versions(parent="parent_value",) + client.list_available_versions( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -778,19 +827,24 @@ def test_list_available_versions_flattened(): def test_list_available_versions_flattened_error(): - client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_available_versions( - datafusion.ListAvailableVersionsRequest(), parent="parent_value", + datafusion.ListAvailableVersionsRequest(), + parent="parent_value", ) @pytest.mark.asyncio async def test_list_available_versions_flattened_async(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -804,7 +858,9 @@ async def test_list_available_versions_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_available_versions(parent="parent_value",) + response = await client.list_available_versions( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -817,19 +873,23 @@ async def test_list_available_versions_flattened_async(): @pytest.mark.asyncio async def test_list_available_versions_flattened_error_async(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_available_versions( - datafusion.ListAvailableVersionsRequest(), parent="parent_value", + datafusion.ListAvailableVersionsRequest(), + parent="parent_value", ) def test_list_available_versions_pager(transport_name: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -847,13 +907,20 @@ def test_list_available_versions_pager(transport_name: str = "grpc"): next_page_token="abc", ), datafusion.ListAvailableVersionsResponse( - available_versions=[], next_page_token="def", + available_versions=[], + next_page_token="def", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(),], next_page_token="ghi", + available_versions=[ + datafusion.Version(), + ], + next_page_token="ghi", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(), datafusion.Version(),], + available_versions=[ + datafusion.Version(), + datafusion.Version(), + ], ), RuntimeError, ) @@ -866,14 +933,15 @@ def test_list_available_versions_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, datafusion.Version) for i in results) def test_list_available_versions_pages(transport_name: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -891,13 +959,20 @@ def test_list_available_versions_pages(transport_name: str = "grpc"): next_page_token="abc", ), datafusion.ListAvailableVersionsResponse( - available_versions=[], next_page_token="def", + available_versions=[], + next_page_token="def", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(),], next_page_token="ghi", + available_versions=[ + datafusion.Version(), + ], + next_page_token="ghi", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(), datafusion.Version(),], + available_versions=[ + datafusion.Version(), + datafusion.Version(), + ], ), RuntimeError, ) @@ -908,7 +983,9 @@ def test_list_available_versions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_available_versions_async_pager(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -927,20 +1004,29 @@ async def test_list_available_versions_async_pager(): next_page_token="abc", ), datafusion.ListAvailableVersionsResponse( - available_versions=[], next_page_token="def", + available_versions=[], + next_page_token="def", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(),], next_page_token="ghi", + available_versions=[ + datafusion.Version(), + ], + next_page_token="ghi", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(), datafusion.Version(),], + available_versions=[ + datafusion.Version(), + datafusion.Version(), + ], ), RuntimeError, ) - async_pager = await client.list_available_versions(request={},) + async_pager = await client.list_available_versions( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -949,7 +1035,9 @@ async def test_list_available_versions_async_pager(): @pytest.mark.asyncio async def test_list_available_versions_async_pages(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -968,27 +1056,43 @@ async def test_list_available_versions_async_pages(): next_page_token="abc", ), datafusion.ListAvailableVersionsResponse( - available_versions=[], next_page_token="def", + available_versions=[], + next_page_token="def", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(),], next_page_token="ghi", + available_versions=[ + datafusion.Version(), + ], + next_page_token="ghi", ), datafusion.ListAvailableVersionsResponse( - available_versions=[datafusion.Version(), datafusion.Version(),], + available_versions=[ + datafusion.Version(), + datafusion.Version(), + ], ), RuntimeError, ) pages = [] - async for page_ in (await client.list_available_versions(request={})).pages: + async for page_ in ( + await client.list_available_versions(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [datafusion.ListInstancesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datafusion.ListInstancesRequest, + dict, + ], +) def test_list_instances(request_type, transport: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -999,7 +1103,8 @@ def test_list_instances(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.list_instances), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = datafusion.ListInstancesResponse( - next_page_token="next_page_token_value", unreachable=["unreachable_value"], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_instances(request) @@ -1018,7 +1123,8 @@ def test_list_instances_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1034,7 +1140,8 @@ async def test_list_instances_async( transport: str = "grpc_asyncio", request_type=datafusion.ListInstancesRequest ): client = DataFusionAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1069,13 +1176,15 @@ async def test_list_instances_async_from_dict(): def test_list_instances_field_headers(): - client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datafusion.ListInstancesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -1089,18 +1198,23 @@ def test_list_instances_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_instances_field_headers_async(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datafusion.ListInstancesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_instances), "__call__") as call: @@ -1116,12 +1230,16 @@ async def test_list_instances_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_instances_pager(transport_name: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
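The pager tests in this file, including the list_instances ones below and the list_available_versions ones above, stage several fake response pages and then assert that iterating the pager yields every item across all pages; results = list(pager) is simply a tidier spelling of the earlier list comprehension. A toy model of that behaviour, not the library's actual pager class:

# Toy model of what the pager tests assert: iterating a pager walks the items
# of every page in order, so pages holding 3, 0, 1 and 2 items yield 6 results.
class FakePager:
    def __init__(self, pages):
        self._pages = pages          # each page is a list of items

    def __iter__(self):
        for page in self._pages:
            yield from page          # flatten pages into a single item stream

pager = FakePager([["a", "b", "c"], [], ["d"], ["e", "f"]])
results = list(pager)
assert len(results) == 6             # matches the assertion style used in the tests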
@@ -1136,12 +1254,21 @@ def test_list_instances_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - datafusion.ListInstancesResponse(instances=[], next_page_token="def",), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", + ), + datafusion.ListInstancesResponse( + instances=[ + datafusion.Instance(), + ], + next_page_token="ghi", ), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(), datafusion.Instance(),], + instances=[ + datafusion.Instance(), + datafusion.Instance(), + ], ), RuntimeError, ) @@ -1154,14 +1281,15 @@ def test_list_instances_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, datafusion.Instance) for i in results) def test_list_instances_pages(transport_name: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1176,12 +1304,21 @@ def test_list_instances_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - datafusion.ListInstancesResponse(instances=[], next_page_token="def",), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", + ), + datafusion.ListInstancesResponse( + instances=[ + datafusion.Instance(), + ], + next_page_token="ghi", ), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(), datafusion.Instance(),], + instances=[ + datafusion.Instance(), + datafusion.Instance(), + ], ), RuntimeError, ) @@ -1192,7 +1329,9 @@ def test_list_instances_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_instances_async_pager(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1208,19 +1347,30 @@ async def test_list_instances_async_pager(): ], next_page_token="abc", ), - datafusion.ListInstancesResponse(instances=[], next_page_token="def",), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", + ), + datafusion.ListInstancesResponse( + instances=[ + datafusion.Instance(), + ], + next_page_token="ghi", ), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(), datafusion.Instance(),], + instances=[ + datafusion.Instance(), + datafusion.Instance(), + ], ), RuntimeError, ) - async_pager = await client.list_instances(request={},) + async_pager = await client.list_instances( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1229,7 +1379,9 @@ async def test_list_instances_async_pager(): @pytest.mark.asyncio async def test_list_instances_async_pages(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1245,26 +1397,44 @@ async def test_list_instances_async_pages(): ], next_page_token="abc", ), - datafusion.ListInstancesResponse(instances=[], next_page_token="def",), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(),], next_page_token="ghi", + instances=[], + next_page_token="def", + ), + datafusion.ListInstancesResponse( + instances=[ + datafusion.Instance(), + ], + next_page_token="ghi", ), datafusion.ListInstancesResponse( - instances=[datafusion.Instance(), datafusion.Instance(),], + instances=[ + datafusion.Instance(), + datafusion.Instance(), + ], ), RuntimeError, ) pages = [] - async for page_ in (await client.list_instances(request={})).pages: + async for page_ in ( + await client.list_instances(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [datafusion.GetInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datafusion.GetInstanceRequest, + dict, + ], +) def test_get_instance(request_type, transport: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1333,7 +1503,8 @@ def test_get_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1349,7 +1520,8 @@ async def test_get_instance_async( transport: str = "grpc_asyncio", request_type=datafusion.GetInstanceRequest ): client = DataFusionAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1422,13 +1594,15 @@ async def test_get_instance_async_from_dict(): def test_get_instance_field_headers(): - client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datafusion.GetInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1442,18 +1616,23 @@ def test_get_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_instance_field_headers_async(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = datafusion.GetInstanceRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_instance), "__call__") as call: @@ -1467,13 +1646,23 @@ async def test_get_instance_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [datafusion.CreateInstanceRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + datafusion.CreateInstanceRequest, + dict, + ], +) def test_create_instance(request_type, transport: str = "grpc"): client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1499,7 +1688,8 @@ def test_create_instance_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = DataFusionClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1515,7 +1705,8 @@ async def test_create_instance_async( transport: str = "grpc_asyncio", request_type=datafusion.CreateInstanceRequest ): client = DataFusionAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1545,13 +1736,15 @@ async def test_create_instance_async_from_dict(): def test_create_instance_field_headers(): - client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datafusion.CreateInstanceRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_instance), "__call__") as call: @@ -1565,18 +1758,23 @@ def test_create_instance_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_instance_field_headers_async(): - client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = DataFusionAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = datafusion.CreateInstanceRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
     with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
@@ -1592,11 +1790,16 @@ async def test_create_instance_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]


 def test_create_instance_flattened():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
@@ -1626,7 +1829,9 @@ def test_create_instance_flattened():


 def test_create_instance_flattened_error():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1641,7 +1846,9 @@ def test_create_instance_flattened_error():

 @pytest.mark.asyncio
 async def test_create_instance_flattened_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
@@ -1676,7 +1883,9 @@ async def test_create_instance_flattened_async():

 @pytest.mark.asyncio
 async def test_create_instance_flattened_error_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -1689,10 +1898,17 @@ async def test_create_instance_flattened_error_async():
         )


-@pytest.mark.parametrize("request_type", [datafusion.DeleteInstanceRequest, dict,])
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datafusion.DeleteInstanceRequest,
+        dict,
+    ],
+)
 def test_delete_instance(request_type, transport: str = "grpc"):
     client = DataFusionClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1718,7 +1934,8 @@ def test_delete_instance_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = DataFusionClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1734,7 +1951,8 @@ async def test_delete_instance_async(
     transport: str = "grpc_asyncio", request_type=datafusion.DeleteInstanceRequest
 ):
     client = DataFusionAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1764,13 +1982,15 @@ async def test_delete_instance_async_from_dict():


 def test_delete_instance_field_headers():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datafusion.DeleteInstanceRequest()

-    request.name = "name/value"
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
@@ -1784,18 +2004,23 @@ def test_delete_instance_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_delete_instance_field_headers_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datafusion.DeleteInstanceRequest()

-    request.name = "name/value"
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
@@ -1811,11 +2036,16 @@ async def test_delete_instance_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]


 def test_delete_instance_flattened():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
@@ -1823,7 +2053,9 @@ def test_delete_instance_flattened():
         call.return_value = operations_pb2.Operation(name="operations/op")
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        client.delete_instance(name="name_value",)
+        client.delete_instance(
+            name="name_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1835,19 +2067,24 @@ def test_delete_instance_flattened():


 def test_delete_instance_flattened_error():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         client.delete_instance(
-            datafusion.DeleteInstanceRequest(), name="name_value",
+            datafusion.DeleteInstanceRequest(),
+            name="name_value",
         )


 @pytest.mark.asyncio
 async def test_delete_instance_flattened_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
@@ -1859,7 +2096,9 @@ async def test_delete_instance_flattened_async():
         )
         # Call the method with a truthy value for each flattened field,
         # using the keyword arguments to the method.
-        response = await client.delete_instance(name="name_value",)
+        response = await client.delete_instance(
+            name="name_value",
+        )

         # Establish that the underlying call was made with the expected
         # request object values.
@@ -1872,20 +2111,30 @@ async def test_delete_instance_flattened_error_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
     with pytest.raises(ValueError):
         await client.delete_instance(
-            datafusion.DeleteInstanceRequest(), name="name_value",
+            datafusion.DeleteInstanceRequest(),
+            name="name_value",
         )


-@pytest.mark.parametrize("request_type", [datafusion.UpdateInstanceRequest, dict,])
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datafusion.UpdateInstanceRequest,
+        dict,
+    ],
+)
 def test_update_instance(request_type, transport: str = "grpc"):
     client = DataFusionClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1911,7 +2160,8 @@ def test_update_instance_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = DataFusionClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -1927,7 +2177,8 @@ async def test_update_instance_async(
     transport: str = "grpc_asyncio", request_type=datafusion.UpdateInstanceRequest
 ):
     client = DataFusionAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -1957,13 +2208,15 @@ async def test_update_instance_async_from_dict():


 def test_update_instance_field_headers():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datafusion.UpdateInstanceRequest()

-    request.instance.name = "instance.name/value"
+    request.instance.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
@@ -1977,20 +2230,23 @@ def test_update_instance_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
-        "metadata"
-    ]
+    assert (
+        "x-goog-request-params",
+        "instance.name=name_value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_update_instance_field_headers_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datafusion.UpdateInstanceRequest()

-    request.instance.name = "instance.name/value"
+    request.instance.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
@@ -2006,13 +2262,16 @@ async def test_update_instance_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
-        "metadata"
-    ]
+    assert (
+        "x-goog-request-params",
+        "instance.name=name_value",
+    ) in kw["metadata"]


 def test_update_instance_flattened():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
@@ -2038,7 +2297,9 @@ def test_update_instance_flattened():


 def test_update_instance_flattened_error():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -2052,7 +2313,9 @@ def test_update_instance_flattened_error():

 @pytest.mark.asyncio
 async def test_update_instance_flattened_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
@@ -2083,7 +2346,9 @@ async def test_update_instance_flattened_async():

 @pytest.mark.asyncio
 async def test_update_instance_flattened_error_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Attempting to call a method with both a request object and flattened
     # fields is an error.
@@ -2095,10 +2360,17 @@ async def test_update_instance_flattened_error_async():
         )


-@pytest.mark.parametrize("request_type", [datafusion.RestartInstanceRequest, dict,])
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        datafusion.RestartInstanceRequest,
+        dict,
+    ],
+)
 def test_restart_instance(request_type, transport: str = "grpc"):
     client = DataFusionClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -2124,7 +2396,8 @@ def test_restart_instance_empty_call():
     # This test is a coverage failsafe to make sure that totally empty calls,
     # i.e. request == None and no flattened fields passed, work.
     client = DataFusionClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
     )

     # Mock the actual call within the gRPC stub, and fake the request.
@@ -2140,7 +2413,8 @@ async def test_restart_instance_async(
     transport: str = "grpc_asyncio", request_type=datafusion.RestartInstanceRequest
 ):
     client = DataFusionAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
@@ -2170,13 +2444,15 @@ async def test_restart_instance_async_from_dict():


 def test_restart_instance_field_headers():
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datafusion.RestartInstanceRequest()

-    request.name = "name/value"
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
@@ -2190,18 +2466,23 @@ def test_restart_instance_field_headers():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]


 @pytest.mark.asyncio
 async def test_restart_instance_field_headers_async():
-    client = DataFusionAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
+    client = DataFusionAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )

     # Any value that is part of the HTTP/1.1 URI should be sent as
     # a field header. Set these to a non-empty value.
     request = datafusion.RestartInstanceRequest()

-    request.name = "name/value"
+    request.name = "name_value"

     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(type(client.transport.restart_instance), "__call__") as call:
@@ -2217,7 +2498,10 @@ async def test_restart_instance_field_headers_async():

     # Establish that the field header was sent.
     _, _, kw = call.mock_calls[0]
-    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+    assert (
+        "x-goog-request-params",
+        "name=name_value",
+    ) in kw["metadata"]


 def test_credentials_transport_error():
@@ -2227,7 +2511,8 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = DataFusionClient(
-            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+            credentials=ga_credentials.AnonymousCredentials(),
+            transport=transport,
         )

     # It is an error to provide a credentials file and a transport instance.
@@ -2247,7 +2532,10 @@ def test_credentials_transport_error():
     options = client_options.ClientOptions()
     options.api_key = "api_key"
     with pytest.raises(ValueError):
-        client = DataFusionClient(client_options=options, transport=transport,)
+        client = DataFusionClient(
+            client_options=options,
+            transport=transport,
+        )

     # It is an error to provide an api_key and a credential.
     options = mock.Mock()
@@ -2263,7 +2551,8 @@ def test_credentials_transport_error():
     )
     with pytest.raises(ValueError):
         client = DataFusionClient(
-            client_options={"scopes": ["1", "2"]}, transport=transport,
+            client_options={"scopes": ["1", "2"]},
+            transport=transport,
         )


@@ -2293,7 +2582,10 @@ def test_transport_get_channel():

 @pytest.mark.parametrize(
     "transport_class",
-    [transports.DataFusionGrpcTransport, transports.DataFusionGrpcAsyncIOTransport,],
+    [
+        transports.DataFusionGrpcTransport,
+        transports.DataFusionGrpcAsyncIOTransport,
+    ],
 )
 def test_transport_adc(transport_class):
     # Test default credentials are used if not provided.
@@ -2303,10 +2595,28 @@ def test_transport_adc(transport_class):
         adc.assert_called_once()


+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+    ],
+)
+def test_transport_kind(transport_name):
+    transport = DataFusionClient.get_transport_class(transport_name)(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert transport.kind == transport_name
+
+
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
-    client = DataFusionClient(credentials=ga_credentials.AnonymousCredentials(),)
-    assert isinstance(client.transport, transports.DataFusionGrpcTransport,)
+    client = DataFusionClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport,
+        transports.DataFusionGrpcTransport,
+    )


 def test_data_fusion_base_transport_error():
@@ -2351,6 +2661,14 @@ def test_data_fusion_base_transport():
     with pytest.raises(NotImplementedError):
         transport.operations_client

+    # Catch all for all remaining methods and properties
+    remainder = [
+        "kind",
+    ]
+    for r in remainder:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, r)()
+

 def test_data_fusion_base_transport_with_credentials_file():
     # Instantiate the base transport with a credentials file
@@ -2362,7 +2680,8 @@
         Transport.return_value = None
         load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
         transport = transports.DataFusionTransport(
-            credentials_file="credentials.json", quota_project_id="octopus",
+            credentials_file="credentials.json",
+            quota_project_id="octopus",
         )
         load_creds.assert_called_once_with(
             "credentials.json",
@@ -2397,7 +2716,10 @@ def test_data_fusion_auth_adc():

 @pytest.mark.parametrize(
     "transport_class",
-    [transports.DataFusionGrpcTransport, transports.DataFusionGrpcAsyncIOTransport,],
+    [
+        transports.DataFusionGrpcTransport,
+        transports.DataFusionGrpcAsyncIOTransport,
+    ],
 )
 def test_data_fusion_transport_auth_adc(transport_class):
     # If credentials and host are not provided, the transport class should use
@@ -2489,24 +2811,40 @@ def test_data_fusion_grpc_transport_client_cert_source_for_mtls(transport_class)
     )


-def test_data_fusion_host_no_port():
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_data_fusion_host_no_port(transport_name):
     client = DataFusionClient(
         credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="datafusion.googleapis.com"
         ),
+        transport=transport_name,
     )
-    assert client.transport._host == "datafusion.googleapis.com:443"
+    assert client.transport._host == ("datafusion.googleapis.com:443")


-def test_data_fusion_host_with_port():
+@pytest.mark.parametrize(
+    "transport_name",
+    [
+        "grpc",
+        "grpc_asyncio",
+    ],
+)
+def test_data_fusion_host_with_port(transport_name):
     client = DataFusionClient(
         credentials=ga_credentials.AnonymousCredentials(),
         client_options=client_options.ClientOptions(
             api_endpoint="datafusion.googleapis.com:8000"
         ),
+        transport=transport_name,
     )
-    assert client.transport._host == "datafusion.googleapis.com:8000"
+    assert client.transport._host == ("datafusion.googleapis.com:8000")


 def test_data_fusion_grpc_transport_channel():
@@ -2514,7 +2852,8 @@

     # Check that channel is used if provided.
     transport = transports.DataFusionGrpcTransport(
-        host="squid.clam.whelk", channel=channel,
+        host="squid.clam.whelk",
+        channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -2526,7 +2865,8 @@ def test_data_fusion_grpc_asyncio_transport_channel():

     # Check that channel is used if provided.
     transport = transports.DataFusionGrpcAsyncIOTransport(
-        host="squid.clam.whelk", channel=channel,
+        host="squid.clam.whelk",
+        channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
@@ -2627,12 +2967,16 @@ def test_data_fusion_transport_channel_mtls_with_adc(transport_class):

 def test_data_fusion_grpc_lro_client():
     client = DataFusionClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
     )
     transport = client.transport

     # Ensure that we have a api-core operations client.
-    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )

     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
@@ -2640,12 +2984,16 @@ def test_data_fusion_grpc_lro_async_client():
     client = DataFusionAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
     )
     transport = client.transport

     # Ensure that we have a api-core operations client.
-    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )

     # Ensure that subsequent calls to the property send the exact same object.
     assert transport.operations_client is transport.operations_client
@@ -2657,7 +3005,10 @@ def test_crypto_key_path():
     key_ring = "whelk"
     crypto_key = "octopus"
     expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format(
-        project=project, location=location, key_ring=key_ring, crypto_key=crypto_key,
+        project=project,
+        location=location,
+        key_ring=key_ring,
+        crypto_key=crypto_key,
     )
     actual = DataFusionClient.crypto_key_path(project, location, key_ring, crypto_key)
     assert expected == actual
@@ -2682,7 +3033,9 @@ def test_instance_path():
     location = "nautilus"
     instance = "scallop"
     expected = "projects/{project}/locations/{location}/instances/{instance}".format(
-        project=project, location=location, instance=instance,
+        project=project,
+        location=location,
+        instance=instance,
     )
     actual = DataFusionClient.instance_path(project, location, instance)
     assert expected == actual
@@ -2723,7 +3076,9 @@ def test_parse_common_billing_account_path():

 def test_common_folder_path():
     folder = "oyster"
-    expected = "folders/{folder}".format(folder=folder,)
+    expected = "folders/{folder}".format(
+        folder=folder,
+    )
     actual = DataFusionClient.common_folder_path(folder)
     assert expected == actual
@@ -2741,7 +3096,9 @@ def test_parse_common_folder_path():

 def test_common_organization_path():
     organization = "cuttlefish"
-    expected = "organizations/{organization}".format(organization=organization,)
+    expected = "organizations/{organization}".format(
+        organization=organization,
+    )
     actual = DataFusionClient.common_organization_path(organization)
     assert expected == actual
@@ -2759,7 +3116,9 @@ def test_parse_common_organization_path():

 def test_common_project_path():
     project = "winkle"
-    expected = "projects/{project}".format(project=project,)
+    expected = "projects/{project}".format(
+        project=project,
+    )
     actual = DataFusionClient.common_project_path(project)
     assert expected == actual
@@ -2779,7 +3138,8 @@ def test_common_location_path():
     project = "scallop"
     location = "abalone"
     expected = "projects/{project}/locations/{location}".format(
-        project=project, location=location,
+        project=project,
+        location=location,
     )
     actual = DataFusionClient.common_location_path(project, location)
     assert expected == actual
@@ -2804,7 +3164,8 @@ def test_client_with_default_client_info():
         transports.DataFusionTransport, "_prep_wrapped_messages"
     ) as prep:
         client = DataFusionClient(
-            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
@@ -2813,7 +3174,8 @@ def test_client_with_default_client_info():
     ) as prep:
         transport_class = DataFusionClient.get_transport_class()
         transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+            credentials=ga_credentials.AnonymousCredentials(),
+            client_info=client_info,
         )
         prep.assert_called_once_with(client_info)
@@ -2821,7 +3183,8 @@
 @pytest.mark.asyncio
 async def test_transport_close_async():
     client = DataFusionAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
     )
     with mock.patch.object(
         type(getattr(client.transport, "grpc_channel")), "close"