From bf5348be1fabd967f2a0df3719c4046378ebf4c4 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 19 May 2023 15:44:13 -0400
Subject: [PATCH 1/3] feat: add Oracle to PostgreSQL migration APIs (#176)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* feat: add Oracle to PostgreSQL migration APIs

PiperOrigin-RevId: 533037268

Source-Link: https://github.com/googleapis/googleapis/commit/d45c9a2409c9e362d163d899f8479cd92959f93e

Source-Link: https://github.com/googleapis/googleapis-gen/commit/40c33043e3d79a0dd38ec29dabde66490dcce809
Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDBjMzMwNDNlM2Q3OWEwZGQzOGVjMjlkYWJkZTY2NDkwZGNjZTgwOSJ9

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

---------

Co-authored-by: Owl Bot
---
 google/cloud/clouddms/__init__.py             |  116 +
 google/cloud/clouddms_v1/__init__.py          |  116 +
 google/cloud/clouddms_v1/gapic_metadata.json  |  190 +
 .../data_migration_service/async_client.py    | 2783 ++++++-
 .../services/data_migration_service/client.py | 2887 ++++++-
 .../services/data_migration_service/pagers.py |  520 +-
 .../data_migration_service/transports/base.py |  382 +-
 .../data_migration_service/transports/grpc.py |  757 +-
 .../transports/grpc_asyncio.py                |  773 +-
 google/cloud/clouddms_v1/types/__init__.py    |  116 +
 google/cloud/clouddms_v1/types/clouddms.py    | 1038 ++-
 .../clouddms_v1/types/clouddms_resources.py   |  801 +-
 .../types/conversionworkspace_resources.py    | 1222 +++
 ...ervice_apply_conversion_workspace_async.py |   57 +
 ...service_apply_conversion_workspace_sync.py |   57 +
 ...rvice_commit_conversion_workspace_async.py |   56 +
 ...ervice_commit_conversion_workspace_sync.py |   56 +
 ...vice_convert_conversion_workspace_async.py |   55 +
 ...rvice_convert_conversion_workspace_sync.py |   55 +
 ...rvice_create_conversion_workspace_async.py |   64 +
 ...ervice_create_conversion_workspace_sync.py |   64 +
 ...service_create_private_connection_async.py |   62 +
 ..._service_create_private_connection_sync.py |   62 +
 ...rvice_delete_conversion_workspace_async.py |   56 +
 ...ervice_delete_conversion_workspace_sync.py |   56 +
 ...service_delete_private_connection_async.py |   56 +
 ..._service_delete_private_connection_sync.py |   56 +
 ...be_conversion_workspace_revisions_async.py |   52 +
 ...ibe_conversion_workspace_revisions_sync.py |   52 +
 ...ervice_describe_database_entities_async.py |   53 +
 ...service_describe_database_entities_sync.py |   53 +
 ...igration_service_fetch_static_ips_async.py |   53 +
 ...migration_service_fetch_static_ips_sync.py |   53 +
 ..._service_get_conversion_workspace_async.py |   52 +
 ...n_service_get_conversion_workspace_sync.py |   52 +
 ...on_service_get_private_connection_async.py |   52 +
 ...ion_service_get_private_connection_sync.py |   52 +
 ...tion_service_import_mapping_rules_async.py |   56 +
 ...ation_service_import_mapping_rules_sync.py |   56 +
 ...ervice_list_conversion_workspaces_async.py |   53 +
 ...service_list_conversion_workspaces_sync.py |   53 +
 ..._service_list_private_connections_async.py |   53 +
 ...n_service_list_private_connections_sync.py |   53 +
 ...ice_rollback_conversion_workspace_async.py |   56 +
 ...vice_rollback_conversion_workspace_sync.py |   56 +
 ...on_service_search_background_jobs_async.py |   52 +
 ...ion_service_search_background_jobs_sync.py |   52 +
 ...service_seed_conversion_workspace_async.py |   56 +
 ..._service_seed_conversion_workspace_sync.py |   56 +
 ...rvice_update_conversion_workspace_async.py |   62 +
...ervice_update_conversion_workspace_sync.py | 62 + ...pet_metadata_google.cloud.clouddms.v1.json | 3965 +++++++-- scripts/fixup_clouddms_v1_keywords.py | 23 +- setup.py | 1 + testing/constraints-3.10.txt | 1 + testing/constraints-3.11.txt | 1 + testing/constraints-3.12.txt | 1 + testing/constraints-3.7.txt | 1 + testing/constraints-3.8.txt | 1 + testing/constraints-3.9.txt | 1 + .../test_data_migration_service.py | 7386 +++++++++++++++-- 61 files changed, 24036 insertions(+), 1158 deletions(-) create mode 100644 google/cloud/clouddms_v1/types/conversionworkspace_resources.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py create mode 100644 
samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py create mode 100644 samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py diff --git a/google/cloud/clouddms/__init__.py b/google/cloud/clouddms/__init__.py index 2cda060..7d90354 100644 --- a/google/cloud/clouddms/__init__.py +++ b/google/cloud/clouddms/__init__.py @@ -25,89 +25,205 @@ DataMigrationServiceClient, ) from google.cloud.clouddms_v1.types.clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, CreateMigrationJobRequest, + CreatePrivateConnectionRequest, DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, GenerateSshScriptRequest, GetConnectionProfileRequest, + GetConversionWorkspaceRequest, GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, ListConnectionProfilesRequest, ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, ListMigrationJobsRequest, ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, OperationMetadata, PromoteMigrationJobRequest, RestartMigrationJobRequest, ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + 
SeedConversionWorkspaceRequest, SshScript, StartMigrationJobRequest, StopMigrationJobRequest, UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, UpdateMigrationJobRequest, VerifyMigrationJobRequest, VmCreationConfig, VmSelectionConfig, ) from google.cloud.clouddms_v1.types.clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, CloudSqlConnectionProfile, CloudSqlSettings, ConnectionProfile, + ConversionWorkspaceInfo, DatabaseEngine, DatabaseProvider, DatabaseType, + ForwardSshTunnelConnectivity, MigrationJob, MigrationJobVerificationError, MySqlConnectionProfile, + NetworkArchitecture, + OracleConnectionProfile, PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, ReverseSshConnectivity, SqlAclEntry, SqlIpConfig, SslConfig, StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, VpcPeeringConnectivity, ) +from google.cloud.clouddms_v1.types.conversionworkspace_resources import ( + BackgroundJobLogEntry, + BackgroundJobType, + ColumnEntity, + ConstraintEntity, + ConversionWorkspace, + DatabaseEngineInfo, + DatabaseEntity, + DatabaseEntityType, + EntityMapping, + EntityMappingLogEntry, + FunctionEntity, + ImportRulesFileFormat, + IndexEntity, + PackageEntity, + SchemaEntity, + SequenceEntity, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + ViewEntity, +) __all__ = ( "DataMigrationServiceClient", "DataMigrationServiceAsyncClient", + "ApplyConversionWorkspaceRequest", + "CommitConversionWorkspaceRequest", + "ConvertConversionWorkspaceRequest", "CreateConnectionProfileRequest", + "CreateConversionWorkspaceRequest", "CreateMigrationJobRequest", + "CreatePrivateConnectionRequest", "DeleteConnectionProfileRequest", + "DeleteConversionWorkspaceRequest", "DeleteMigrationJobRequest", + "DeletePrivateConnectionRequest", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", "GenerateSshScriptRequest", "GetConnectionProfileRequest", + "GetConversionWorkspaceRequest", "GetMigrationJobRequest", + "GetPrivateConnectionRequest", + "ImportMappingRulesRequest", "ListConnectionProfilesRequest", "ListConnectionProfilesResponse", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", "ListMigrationJobsRequest", "ListMigrationJobsResponse", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", "OperationMetadata", "PromoteMigrationJobRequest", "RestartMigrationJobRequest", "ResumeMigrationJobRequest", + "RollbackConversionWorkspaceRequest", + "SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "SeedConversionWorkspaceRequest", "SshScript", "StartMigrationJobRequest", "StopMigrationJobRequest", "UpdateConnectionProfileRequest", + "UpdateConversionWorkspaceRequest", "UpdateMigrationJobRequest", "VerifyMigrationJobRequest", "VmCreationConfig", "VmSelectionConfig", + "AlloyDbConnectionProfile", + "AlloyDbSettings", "CloudSqlConnectionProfile", "CloudSqlSettings", "ConnectionProfile", + "ConversionWorkspaceInfo", "DatabaseType", + "ForwardSshTunnelConnectivity", "MigrationJob", "MigrationJobVerificationError", "MySqlConnectionProfile", + "OracleConnectionProfile", "PostgreSqlConnectionProfile", + "PrivateConnection", + "PrivateConnectivity", + "PrivateServiceConnectConnectivity", "ReverseSshConnectivity", "SqlAclEntry", "SqlIpConfig", "SslConfig", 
"StaticIpConnectivity", + "StaticServiceIpConnectivity", + "VpcPeeringConfig", "VpcPeeringConnectivity", "DatabaseEngine", "DatabaseProvider", + "NetworkArchitecture", + "BackgroundJobLogEntry", + "ColumnEntity", + "ConstraintEntity", + "ConversionWorkspace", + "DatabaseEngineInfo", + "DatabaseEntity", + "EntityMapping", + "EntityMappingLogEntry", + "FunctionEntity", + "IndexEntity", + "PackageEntity", + "SchemaEntity", + "SequenceEntity", + "StoredProcedureEntity", + "SynonymEntity", + "TableEntity", + "TriggerEntity", + "ViewEntity", + "BackgroundJobType", + "DatabaseEntityType", + "ImportRulesFileFormat", ) diff --git a/google/cloud/clouddms_v1/__init__.py b/google/cloud/clouddms_v1/__init__.py index cc0f3b5..0412200 100644 --- a/google/cloud/clouddms_v1/__init__.py +++ b/google/cloud/clouddms_v1/__init__.py @@ -23,89 +23,205 @@ DataMigrationServiceClient, ) from .types.clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, CreateMigrationJobRequest, + CreatePrivateConnectionRequest, DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, GenerateSshScriptRequest, GetConnectionProfileRequest, + GetConversionWorkspaceRequest, GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, ListConnectionProfilesRequest, ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, ListMigrationJobsRequest, ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, OperationMetadata, PromoteMigrationJobRequest, RestartMigrationJobRequest, ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + SeedConversionWorkspaceRequest, SshScript, StartMigrationJobRequest, StopMigrationJobRequest, UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, UpdateMigrationJobRequest, VerifyMigrationJobRequest, VmCreationConfig, VmSelectionConfig, ) from .types.clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, CloudSqlConnectionProfile, CloudSqlSettings, ConnectionProfile, + ConversionWorkspaceInfo, DatabaseEngine, DatabaseProvider, DatabaseType, + ForwardSshTunnelConnectivity, MigrationJob, MigrationJobVerificationError, MySqlConnectionProfile, + NetworkArchitecture, + OracleConnectionProfile, PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, ReverseSshConnectivity, SqlAclEntry, SqlIpConfig, SslConfig, StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, VpcPeeringConnectivity, ) +from .types.conversionworkspace_resources import ( + BackgroundJobLogEntry, + BackgroundJobType, + ColumnEntity, + ConstraintEntity, + ConversionWorkspace, + DatabaseEngineInfo, + DatabaseEntity, + DatabaseEntityType, + EntityMapping, + EntityMappingLogEntry, + FunctionEntity, + ImportRulesFileFormat, + IndexEntity, + PackageEntity, + SchemaEntity, + SequenceEntity, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + ViewEntity, +) __all__ = ( "DataMigrationServiceAsyncClient", + 
"AlloyDbConnectionProfile", + "AlloyDbSettings", + "ApplyConversionWorkspaceRequest", + "BackgroundJobLogEntry", + "BackgroundJobType", "CloudSqlConnectionProfile", "CloudSqlSettings", + "ColumnEntity", + "CommitConversionWorkspaceRequest", "ConnectionProfile", + "ConstraintEntity", + "ConversionWorkspace", + "ConversionWorkspaceInfo", + "ConvertConversionWorkspaceRequest", "CreateConnectionProfileRequest", + "CreateConversionWorkspaceRequest", "CreateMigrationJobRequest", + "CreatePrivateConnectionRequest", "DataMigrationServiceClient", "DatabaseEngine", + "DatabaseEngineInfo", + "DatabaseEntity", + "DatabaseEntityType", "DatabaseProvider", "DatabaseType", "DeleteConnectionProfileRequest", + "DeleteConversionWorkspaceRequest", "DeleteMigrationJobRequest", + "DeletePrivateConnectionRequest", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "EntityMapping", + "EntityMappingLogEntry", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", + "ForwardSshTunnelConnectivity", + "FunctionEntity", "GenerateSshScriptRequest", "GetConnectionProfileRequest", + "GetConversionWorkspaceRequest", "GetMigrationJobRequest", + "GetPrivateConnectionRequest", + "ImportMappingRulesRequest", + "ImportRulesFileFormat", + "IndexEntity", "ListConnectionProfilesRequest", "ListConnectionProfilesResponse", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", "ListMigrationJobsRequest", "ListMigrationJobsResponse", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", "MigrationJob", "MigrationJobVerificationError", "MySqlConnectionProfile", + "NetworkArchitecture", "OperationMetadata", + "OracleConnectionProfile", + "PackageEntity", "PostgreSqlConnectionProfile", + "PrivateConnection", + "PrivateConnectivity", + "PrivateServiceConnectConnectivity", "PromoteMigrationJobRequest", "RestartMigrationJobRequest", "ResumeMigrationJobRequest", "ReverseSshConnectivity", + "RollbackConversionWorkspaceRequest", + "SchemaEntity", + "SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "SeedConversionWorkspaceRequest", + "SequenceEntity", "SqlAclEntry", "SqlIpConfig", "SshScript", "SslConfig", "StartMigrationJobRequest", "StaticIpConnectivity", + "StaticServiceIpConnectivity", "StopMigrationJobRequest", + "StoredProcedureEntity", + "SynonymEntity", + "TableEntity", + "TriggerEntity", "UpdateConnectionProfileRequest", + "UpdateConversionWorkspaceRequest", "UpdateMigrationJobRequest", "VerifyMigrationJobRequest", + "ViewEntity", "VmCreationConfig", "VmSelectionConfig", + "VpcPeeringConfig", "VpcPeeringConnectivity", ) diff --git a/google/cloud/clouddms_v1/gapic_metadata.json b/google/cloud/clouddms_v1/gapic_metadata.json index d505c03..a1d6f43 100644 --- a/google/cloud/clouddms_v1/gapic_metadata.json +++ b/google/cloud/clouddms_v1/gapic_metadata.json @@ -10,26 +10,76 @@ "grpc": { "libraryClient": "DataMigrationServiceClient", "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, "CreateConnectionProfile": { "methods": [ "create_connection_profile" ] }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, "CreateMigrationJob": { "methods": [ "create_migration_job" ] }, + "CreatePrivateConnection": { + 
"methods": [ + "create_private_connection" + ] + }, "DeleteConnectionProfile": { "methods": [ "delete_connection_profile" ] }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, "DeleteMigrationJob": { "methods": [ "delete_migration_job" ] }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + "describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, "GenerateSshScript": { "methods": [ "generate_ssh_script" @@ -40,21 +90,46 @@ "get_connection_profile" ] }, + "GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, "GetMigrationJob": { "methods": [ "get_migration_job" ] }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + ] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, "ListConnectionProfiles": { "methods": [ "list_connection_profiles" ] }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, "ListMigrationJobs": { "methods": [ "list_migration_jobs" ] }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, "PromoteMigrationJob": { "methods": [ "promote_migration_job" @@ -70,6 +145,21 @@ "resume_migration_job" ] }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, "StartMigrationJob": { "methods": [ "start_migration_job" @@ -85,6 +175,11 @@ "update_connection_profile" ] }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, "UpdateMigrationJob": { "methods": [ "update_migration_job" @@ -100,26 +195,76 @@ "grpc-async": { "libraryClient": "DataMigrationServiceAsyncClient", "rpcs": { + "ApplyConversionWorkspace": { + "methods": [ + "apply_conversion_workspace" + ] + }, + "CommitConversionWorkspace": { + "methods": [ + "commit_conversion_workspace" + ] + }, + "ConvertConversionWorkspace": { + "methods": [ + "convert_conversion_workspace" + ] + }, "CreateConnectionProfile": { "methods": [ "create_connection_profile" ] }, + "CreateConversionWorkspace": { + "methods": [ + "create_conversion_workspace" + ] + }, "CreateMigrationJob": { "methods": [ "create_migration_job" ] }, + "CreatePrivateConnection": { + "methods": [ + "create_private_connection" + ] + }, "DeleteConnectionProfile": { "methods": [ "delete_connection_profile" ] }, + "DeleteConversionWorkspace": { + "methods": [ + "delete_conversion_workspace" + ] + }, "DeleteMigrationJob": { "methods": [ "delete_migration_job" ] }, + "DeletePrivateConnection": { + "methods": [ + "delete_private_connection" + ] + }, + "DescribeConversionWorkspaceRevisions": { + "methods": [ + "describe_conversion_workspace_revisions" + ] + }, + "DescribeDatabaseEntities": { + "methods": [ + "describe_database_entities" + ] + }, + "FetchStaticIps": { + "methods": [ + "fetch_static_ips" + ] + }, "GenerateSshScript": { "methods": [ "generate_ssh_script" @@ -130,21 +275,46 @@ "get_connection_profile" ] }, + "GetConversionWorkspace": { + "methods": [ + "get_conversion_workspace" + ] + }, "GetMigrationJob": { "methods": [ "get_migration_job" ] }, + "GetPrivateConnection": { + "methods": [ + "get_private_connection" + 
] + }, + "ImportMappingRules": { + "methods": [ + "import_mapping_rules" + ] + }, "ListConnectionProfiles": { "methods": [ "list_connection_profiles" ] }, + "ListConversionWorkspaces": { + "methods": [ + "list_conversion_workspaces" + ] + }, "ListMigrationJobs": { "methods": [ "list_migration_jobs" ] }, + "ListPrivateConnections": { + "methods": [ + "list_private_connections" + ] + }, "PromoteMigrationJob": { "methods": [ "promote_migration_job" @@ -160,6 +330,21 @@ "resume_migration_job" ] }, + "RollbackConversionWorkspace": { + "methods": [ + "rollback_conversion_workspace" + ] + }, + "SearchBackgroundJobs": { + "methods": [ + "search_background_jobs" + ] + }, + "SeedConversionWorkspace": { + "methods": [ + "seed_conversion_workspace" + ] + }, "StartMigrationJob": { "methods": [ "start_migration_job" @@ -175,6 +360,11 @@ "update_connection_profile" ] }, + "UpdateConversionWorkspace": { + "methods": [ + "update_conversion_workspace" + ] + }, "UpdateMigrationJob": { "methods": [ "update_migration_job" diff --git a/google/cloud/clouddms_v1/services/data_migration_service/async_client.py b/google/cloud/clouddms_v1/services/data_migration_service/async_client.py index 0a271e1..110fb2a 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/async_client.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/async_client.py @@ -44,6 +44,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import duration_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -51,7 +55,11 @@ from google.rpc import status_pb2 # type: ignore from google.cloud.clouddms_v1.services.data_migration_service import pagers -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) from .client import DataMigrationServiceClient from .transports.base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport @@ -72,10 +80,24 @@ class DataMigrationServiceAsyncClient: parse_connection_profile_path = staticmethod( DataMigrationServiceClient.parse_connection_profile_path ) + conversion_workspace_path = staticmethod( + DataMigrationServiceClient.conversion_workspace_path + ) + parse_conversion_workspace_path = staticmethod( + DataMigrationServiceClient.parse_conversion_workspace_path + ) migration_job_path = staticmethod(DataMigrationServiceClient.migration_job_path) parse_migration_job_path = staticmethod( DataMigrationServiceClient.parse_migration_job_path ) + networks_path = staticmethod(DataMigrationServiceClient.networks_path) + parse_networks_path = staticmethod(DataMigrationServiceClient.parse_networks_path) + private_connection_path = staticmethod( + DataMigrationServiceClient.private_connection_path + ) + parse_private_connection_path = staticmethod( + DataMigrationServiceClient.parse_private_connection_path + ) common_billing_account_path = staticmethod( DataMigrationServiceClient.common_billing_account_path ) @@ -271,10 +293,10 @@ async def sample_list_migration_jobs(): Args: request (Optional[Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]]): - The request object. 
Retrieve a list of all migration jobs - in a given project and location. + The request object. Retrieves a list of all migration + jobs in a given project and location. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of migrationJobs. This corresponds to the ``parent`` field @@ -507,7 +529,7 @@ async def sample_create_migration_job(): Database Migration Service migration job in the specified project and region. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of migration jobs. This corresponds to the ``parent`` field @@ -660,9 +682,9 @@ async def sample_update_migration_job(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to - specify the fields to be overwritten in - the migration job resource by the - update. + specify the fields to be overwritten by + the update in the migration job + resource. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1534,8 +1556,8 @@ async def list_connection_profiles( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListConnectionProfilesAsyncPager: - r"""Retrieve a list of all connection profiles in a given - project and location. + r"""Retrieves a list of all connection profiles in a + given project and location. .. code-block:: python @@ -1569,7 +1591,7 @@ async def sample_list_connection_profiles(): The request object. Request message for 'ListConnectionProfiles' request. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -1798,7 +1820,7 @@ async def sample_create_connection_profile(): The request object. Request message for 'CreateConnectionProfile' request. parent (:class:`str`): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -1949,9 +1971,9 @@ async def sample_update_connection_profile(): should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Field mask is used to - specify the fields to be overwritten in - the connection profile resource by the - update. + specify the fields to be overwritten by + the update in the connection profile + resource. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2152,6 +2174,2737 @@ async def sample_delete_connection_profile(): # Done; return the response. return response + async def create_private_connection( + self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new private connection in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]]): + The request object. Request message to create a new + private connection in the specified + project and region. + parent (:class:`str`): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (:class:`google.cloud.clouddms_v1.types.PrivateConnection`): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (:class:`str`): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.CreatePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_private_connection( + self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]]): + The request object. Request message to get a private + connection resource. + name (:class:`str`): + Required. The name of the private + connection to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.GetPrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_private_connections( + self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsAsyncPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (:class:`str`): + Required. The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.ListPrivateConnectionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_private_connections, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListPrivateConnectionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_private_connection( + self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]]): + The request object. Request message to delete a private + connection. + name (:class:`str`): + Required. The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.DeletePrivateConnectionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_private_connection, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_conversion_workspace( + self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.GetConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_conversion_workspaces( + self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesAsyncPager: + r"""Lists conversion workspaces in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.ListConversionWorkspacesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_conversion_workspaces, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListConversionWorkspacesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CreateConversionWorkspaceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new conversion workspace in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]]): + The request object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (:class:`str`): + Required. The parent which owns this + collection of conversion workspaces. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. Represents a conversion + workspace object. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (:class:`str`): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, conversion_workspace, conversion_workspace_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.CreateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_conversion_workspace( + self, + request: Optional[ + Union[clouddms.UpdateConversionWorkspaceRequest, dict] + ] = None, + *, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates the parameters of a single conversion + workspace. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'UpdateConversionWorkspace' request. + conversion_workspace (:class:`google.cloud.clouddms_v1.types.ConversionWorkspace`): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.UpdateConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
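+        # [Editor's note] A brief sketch of what the wrapper below provides,
+        # assuming standard google.api_core semantics: when the caller leaves
+        # `retry`/`timeout` at `gapic_v1.method.DEFAULT`, the wrapped call
+        # falls back to the defaults given here (a 60s timeout, no retry),
+        # roughly:
+        #
+        #     if timeout is gapic_v1.method.DEFAULT:
+        #         timeout = 60.0
+        #     response = await transport_method(request, timeout=timeout, ...)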
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace.name", request.conversion_workspace.name),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_conversion_workspace( + self, + request: Optional[ + Union[clouddms.DeleteConversionWorkspaceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (:class:`str`): + Required. Name of the conversion + workspace resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
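+        # [Editor's note] The flattened `name=` argument and a full `request`
+        # object are mutually exclusive. For example (hypothetical resource
+        # name), these two calls are equivalent:
+        #
+        #     await client.delete_conversion_workspace(
+        #         name="projects/p/locations/l/conversionWorkspaces/cw")
+        #     await client.delete_conversion_workspace(
+        #         request=clouddms_v1.DeleteConversionWorkspaceRequest(
+        #             name="projects/p/locations/l/conversionWorkspaces/cw"))
+        #
+        # while passing both raises the ValueError below.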
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.DeleteConversionWorkspaceRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def seed_conversion_workspace( + self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'SeedConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. 
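+        # [Editor's note] Proto-plus constructors accept `None` (an empty
+        # message), a `dict`, or an existing message instance, so the line
+        # below normalizes whatever the caller passed, e.g. (hypothetical
+        # values):
+        #
+        #     clouddms.SeedConversionWorkspaceRequest(
+        #         {"name": "projects/p/locations/l/conversionWorkspaces/cw",
+        #          "source_connection_profile": "projects/p/locations/l/connectionProfiles/src"})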
+ request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.seed_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def import_mapping_rules( + self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.import_mapping_rules, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
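+        # [Editor's note] `to_grpc_metadata` renders the given fields into the
+        # `x-goog-request-params` header so the backend can route the call by
+        # `parent`; the entry appended below looks roughly like (hypothetical
+        # value, URL-encoded in practice):
+        #
+        #     ("x-goog-request-params",
+        #      "parent=projects/p/locations/l/conversionWorkspaces/cw")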
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def convert_conversion_workspace( + self, + request: Optional[ + Union[clouddms.ConvertConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ConvertConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.convert_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def commit_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CommitConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'CommitConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.commit_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def rollback_conversion_workspace( + self, + request: Optional[ + Union[clouddms.RollbackConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def apply_conversion_workspace( + self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation_async.AsyncOperation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.apply_conversion_workspace, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + async def describe_database_entities( + self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesAsyncPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.describe_database_entities, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.DescribeDatabaseEntitiesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def search_background_jobs( + self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.search_background_jobs, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def describe_conversion_workspace_revisions( + self, + request: Optional[ + Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]]): + The request object. 
Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def fetch_static_ips( + self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsAsyncPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + async def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]]): + The request object. Request message for 'FetchStaticIps' + request. + name (:class:`str`): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager: + Response message for a + 'FetchStaticIps' request. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = clouddms.FetchStaticIpsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.fetch_static_ips, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FetchStaticIpsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
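+        # [Editor's note] DeleteOperation has no response body; the RPC is
+        # awaited only so transport errors (e.g. NotFound, Unimplemented)
+        # propagate to the caller, and the method itself returns None.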
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def get_iam_policy(
+        self,
+        request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> policy_pb2.Policy:
+        r"""Gets the IAM access control policy for a function.
+
+        Returns an empty policy if the function exists and does not have a
+        policy set.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+                The request object. Request message for `GetIamPolicy`
+                method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if
+                any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.policy_pb2.Policy:
+                Defines an Identity and Access Management (IAM) policy.
+                It is used to specify access control policies for Cloud
+                Platform resources.
+                A ``Policy`` is a collection of ``bindings``. A
+                ``binding`` binds one or more ``members`` to a single
+                ``role``. Members can be user accounts, service
+                accounts, Google groups, and domains (such as G Suite).
+                A ``role`` is a named list of permissions (defined by
+                IAM or configured by users). A ``binding`` can
+                optionally specify a ``condition``, which is a logic
+                expression that further constrains the role binding
+                based on attributes about the request and/or target
+                resource.
+
+                **JSON Example**
+
+                ::
+
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+
+                **YAML Example**
+
+                ::
+
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def test_iam_permissions(
+        self,
+        request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified IAM permissions against the IAM access control
+        policy for a function.
+
+        If the function does not exist, this will return an empty set
+        of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry.Retry): Designation of what errors,
+                if any, should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~.iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for ``TestIamPermissions`` method.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
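+        # [Editor's note] As with the other IAM mixins above, a plain dict is
+        # accepted and expanded into the proto via keyword expansion. E.g.
+        # (hypothetical resource and permission strings):
+        #
+        #     response = await client.test_iam_permissions({
+        #         "resource": "projects/p/locations/l/migrationJobs/mj",
+        #         "permissions": ["datamigration.migrationjobs.get"],
+        #     })
+        #     allowed = set(response.permissions)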
+ rpc = gapic_v1.method.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
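+        # [Editor's note] ListLocations is not wrapped in a pager here; the
+        # response carries the locations directly. A minimal usage sketch,
+        # assuming the standard locations mixin shapes:
+        #
+        #     response = await client.list_locations(
+        #         locations_pb2.ListLocationsRequest(name="projects/my-project")
+        #     )
+        #     for loc in response.locations:
+        #         print(loc.location_id)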
+        rpc = gapic_v1.method.wrap_method(
+            self._client._transport.list_locations,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
     async def __aenter__(self):
         return self
 
diff --git a/google/cloud/clouddms_v1/services/data_migration_service/client.py b/google/cloud/clouddms_v1/services/data_migration_service/client.py
index e5188db..2ba298b 100644
--- a/google/cloud/clouddms_v1/services/data_migration_service/client.py
+++ b/google/cloud/clouddms_v1/services/data_migration_service/client.py
@@ -48,6 +48,10 @@
 from google.api_core import operation  # type: ignore
 from google.api_core import operation_async  # type: ignore
+from google.cloud.location import locations_pb2  # type: ignore
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.longrunning import operations_pb2
 from google.protobuf import duration_pb2  # type: ignore
 from google.protobuf import empty_pb2  # type: ignore
 from google.protobuf import field_mask_pb2  # type: ignore
@@ -55,7 +59,11 @@
 from google.rpc import status_pb2  # type: ignore
 
 from google.cloud.clouddms_v1.services.data_migration_service import pagers
-from google.cloud.clouddms_v1.types import clouddms, clouddms_resources
+from google.cloud.clouddms_v1.types import (
+    clouddms,
+    clouddms_resources,
+    conversionworkspace_resources,
+)
 
 from .transports.base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport
 from .transports.grpc import DataMigrationServiceGrpcTransport
@@ -205,6 +213,28 @@ def parse_connection_profile_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def conversion_workspace_path(
+        project: str,
+        location: str,
+        conversion_workspace: str,
+    ) -> str:
+        """Returns a fully-qualified conversion_workspace string."""
+        return "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format(
+            project=project,
+            location=location,
+            conversion_workspace=conversion_workspace,
+        )
+
+    @staticmethod
+    def parse_conversion_workspace_path(path: str) -> Dict[str, str]:
+        """Parses a conversion_workspace path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/conversionWorkspaces/(?P<conversion_workspace>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def migration_job_path(
         project: str,
@@ -227,6 +257,47 @@ def parse_migration_job_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def networks_path(
+        project: str,
+        network: str,
+    ) -> str:
+        """Returns a fully-qualified networks string."""
+        return "projects/{project}/global/networks/{network}".format(
+            project=project,
+            network=network,
+        )
+
+    @staticmethod
+    def parse_networks_path(path: str) -> Dict[str, str]:
+        """Parses a networks path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/global/networks/(?P<network>.+?)$", path
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def private_connection_path(
+        project: str,
+        location: str,
+        private_connection: str,
+    ) -> str:
+        """Returns a fully-qualified private_connection string."""
+        return "projects/{project}/locations/{location}/privateConnections/{private_connection}".format(
"projects/{project}/locations/{location}/privateConnections/{private_connection}".format( + project=project, + location=location, + private_connection=private_connection, + ) + + @staticmethod + def parse_private_connection_path(path: str) -> Dict[str, str]: + """Parses a private_connection path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/privateConnections/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -511,10 +582,10 @@ def sample_list_migration_jobs(): Args: request (Union[google.cloud.clouddms_v1.types.ListMigrationJobsRequest, dict]): - The request object. Retrieve a list of all migration jobs - in a given project and location. + The request object. Retrieves a list of all migration + jobs in a given project and location. parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of migrationJobs. This corresponds to the ``parent`` field @@ -747,7 +818,7 @@ def sample_create_migration_job(): Database Migration Service migration job in the specified project and region. parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of migration jobs. This corresponds to the ``parent`` field @@ -900,9 +971,9 @@ def sample_update_migration_job(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to - specify the fields to be overwritten in - the migration job resource by the - update. + specify the fields to be overwritten by + the update in the conversion workspace + resource. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -1781,8 +1852,8 @@ def list_connection_profiles( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListConnectionProfilesPager: - r"""Retrieve a list of all connection profiles in a given - project and location. + r"""Retrieves a list of all connection profiles in a + given project and location. .. code-block:: python @@ -1816,7 +1887,7 @@ def sample_list_connection_profiles(): The request object. Request message for 'ListConnectionProfiles' request. parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -2045,7 +2116,7 @@ def sample_create_connection_profile(): The request object. Request message for 'CreateConnectionProfile' request. parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. This corresponds to the ``parent`` field @@ -2198,9 +2269,9 @@ def sample_update_connection_profile(): should not be set. update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Field mask is used to - specify the fields to be overwritten in - the connection profile resource by the - update. + specify the fields to be overwritten by + the update in the conversion workspace + resource. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this @@ -2405,18 +2476,2786 @@ def sample_delete_connection_profile(): # Done; return the response. 
return response - def __enter__(self) -> "DataMigrationServiceClient": - return self + def create_private_connection( + self, + request: Optional[Union[clouddms.CreatePrivateConnectionRequest, dict]] = None, + *, + parent: Optional[str] = None, + private_connection: Optional[clouddms_resources.PrivateConnection] = None, + private_connection_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new private connection in a given project + and location. - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest, dict]): + The request object. Request message to create a new + private connection in the specified + project and region. + parent (str): + Required. The parent that owns the + collection of PrivateConnections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection (google.cloud.clouddms_v1.types.PrivateConnection): + Required. The private connection + resource to create. + + This corresponds to the ``private_connection`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + private_connection_id (str): + Required. The private connection + identifier. + + This corresponds to the ``private_connection_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.clouddms_v1.types.PrivateConnection` The PrivateConnection resource is used to establish private connectivity + with the customer's network. - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! 
Exiting the with block will CLOSE the transport - and may cause errors in other clients! """ - self.transport.close() + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, private_connection, private_connection_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreatePrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreatePrivateConnectionRequest): + request = clouddms.CreatePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if private_connection is not None: + request.private_connection = private_connection + if private_connection_id is not None: + request.private_connection_id = private_connection_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_private_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + clouddms_resources.PrivateConnection, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_private_connection( + self, + request: Optional[Union[clouddms.GetPrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms_resources.PrivateConnection: + r"""Gets details of a single private connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_private_connection(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetPrivateConnectionRequest, dict]): + The request object. Request message to get a private + connection resource. + name (str): + Required. The name of the private + connection to get. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.PrivateConnection: + The PrivateConnection resource is + used to establish private connectivity + with the customer's network. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetPrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GetPrivateConnectionRequest): + request = clouddms.GetPrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_private_connection] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_private_connections( + self, + request: Optional[Union[clouddms.ListPrivateConnectionsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListPrivateConnectionsPager: + r"""Retrieves a list of private connections in a given + project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest, dict]): + The request object. Request message to retrieve a list of + private connections in a given project + and location. + parent (str): + Required. 
The parent that owns the + collection of private connections. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager: + Response message for + 'ListPrivateConnections' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListPrivateConnectionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListPrivateConnectionsRequest): + request = clouddms.ListPrivateConnectionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_private_connections] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPrivateConnectionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_private_connection( + self, + request: Optional[Union[clouddms.DeletePrivateConnectionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single Database Migration Service private + connection. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest, dict]): + The request object. Request message to delete a private + connection. + name (str): + Required. The name of the private + connection to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeletePrivateConnectionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeletePrivateConnectionRequest): + request = clouddms.DeletePrivateConnectionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_private_connection + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
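+        # Illustrative note (hypothetical resource name): the future above
+        # resolves to empty_pb2.Empty, so callers typically just block on
+        # completion.
+        #
+        #     op = client.delete_private_connection(
+        #         name="projects/p/locations/us-central1/privateConnections/pc"
+        #     )
+        #     op.result()  # raises on failure; returns Empty on success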
+ return response + + def get_conversion_workspace( + self, + request: Optional[Union[clouddms.GetConversionWorkspaceRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> conversionworkspace_resources.ConversionWorkspace: + r"""Gets details of a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest, dict]): + The request object. Request message for + 'GetConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to get. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.ConversionWorkspace: + The main conversion workspace + resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.GetConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.GetConversionWorkspaceRequest): + request = clouddms.GetConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_conversion_workspace] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_conversion_workspaces( + self, + request: Optional[Union[clouddms.ListConversionWorkspacesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListConversionWorkspacesPager: + r"""Lists conversion workspaces in a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest, dict]): + The request object. Retrieve a list of all conversion + workspaces in a given project and + location. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager: + Response message for + 'ListConversionWorkspaces' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ListConversionWorkspacesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ListConversionWorkspacesRequest): + request = clouddms.ListConversionWorkspacesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_conversion_workspaces + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
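+        # The routing header below is transmitted as the
+        # x-goog-request-params metadata key, e.g.
+        # "parent=projects/p/locations/us-central1" (illustrative value),
+        # which the backend uses to route the request to the right region.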
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListConversionWorkspacesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CreateConversionWorkspaceRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + conversion_workspace_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a new conversion workspace in a given project + and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest, dict]): + The request object. Request message to create a new + Conversion Workspace in the specified + project and region. + parent (str): + Required. The parent which owns this + collection of conversion workspaces. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. Represents a conversion + workspace object. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + conversion_workspace_id (str): + Required. The ID of the conversion + workspace to create. + + This corresponds to the ``conversion_workspace_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any( + [parent, conversion_workspace, conversion_workspace_id] + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CreateConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CreateConversionWorkspaceRequest): + request = clouddms.CreateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if conversion_workspace_id is not None: + request.conversion_workspace_id = conversion_workspace_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_conversion_workspace( + self, + request: Optional[ + Union[clouddms.UpdateConversionWorkspaceRequest, dict] + ] = None, + *, + conversion_workspace: Optional[ + conversionworkspace_resources.ConversionWorkspace + ] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Updates the parameters of a single conversion + workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest, dict]): + The request object. Request message for + 'UpdateConversionWorkspace' request. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace): + Required. The conversion workspace + parameters to update. + + This corresponds to the ``conversion_workspace`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Field mask is used to + specify the fields to be overwritten by + the update in the conversion workspace + resource. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([conversion_workspace, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.UpdateConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.UpdateConversionWorkspaceRequest): + request = clouddms.UpdateConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if conversion_workspace is not None: + request.conversion_workspace = conversion_workspace + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
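+        # Unlike the hand-wrapped mixin RPCs (get_iam_policy, get_location,
+        # and so on), generated service methods are pre-wrapped on the
+        # transport with their default retry/timeout settings, so the wrapped
+        # callable is looked up here rather than built on the fly.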
+ rpc = self._transport._wrapped_methods[ + self._transport.update_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace.name", request.conversion_workspace.name),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_conversion_workspace( + self, + request: Optional[ + Union[clouddms.DeleteConversionWorkspaceRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Deletes a single conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest, dict]): + The request object. Request message for + 'DeleteConversionWorkspace' request. + name (str): + Required. Name of the conversion + workspace resource to delete. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
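+        # The mutual-exclusion contract, illustrated (hypothetical values):
+        #
+        #     client.delete_conversion_workspace(name=ws_name)   # OK
+        #     client.delete_conversion_workspace(request=req)    # OK
+        #     client.delete_conversion_workspace(
+        #         request=req, name=ws_name                      # ValueError
+        #     )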
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DeleteConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DeleteConversionWorkspaceRequest): + request = clouddms.DeleteConversionWorkspaceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def seed_conversion_workspace( + self, + request: Optional[Union[clouddms.SeedConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports a snapshot of the source database into the + conversion workspace. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest, dict]): + The request object. Request message for + 'SeedConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. 
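+        # Note: per the request proto, SeedConversionWorkspaceRequest carries
+        # a seed_from oneof, so a request names either
+        # source_connection_profile or destination_connection_profile, not
+        # both.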
+ # Minor optimization to avoid making a copy if the user passes + # in a clouddms.SeedConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.SeedConversionWorkspaceRequest): + request = clouddms.SeedConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.seed_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def import_mapping_rules( + self, + request: Optional[Union[clouddms.ImportMappingRulesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ImportMappingRulesRequest, dict]): + The request object. Request message for + 'ImportMappingRules' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ImportMappingRulesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
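+        # For ImportMappingRules, `parent` is the conversion workspace itself,
+        # e.g. "projects/p/locations/l/conversionWorkspaces/cw" (illustrative
+        # value), which is why the routing header below uses request.parent
+        # rather than request.name.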
+ if not isinstance(request, clouddms.ImportMappingRulesRequest): + request = clouddms.ImportMappingRulesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.import_mapping_rules] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def convert_conversion_workspace( + self, + request: Optional[ + Union[clouddms.ConvertConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Creates a draft tree schema for the destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ConvertConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ConvertConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ConvertConversionWorkspaceRequest): + request = clouddms.ConvertConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
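+        # Workflow context: convert is the middle step of the conversion
+        # flow. It builds the draft destination tree from the seeded source
+        # tree; the draft is then committed and applied via the commit/apply
+        # methods below.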
+ rpc = self._transport._wrapped_methods[ + self._transport.convert_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def commit_conversion_workspace( + self, + request: Optional[ + Union[clouddms.CommitConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Marks all the data in the conversion workspace as + committed. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest, dict]): + The request object. Request message for + 'CommitConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.CommitConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.CommitConversionWorkspaceRequest): + request = clouddms.CommitConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.commit_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
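+        # Commit snapshots the current draft; if the draft later needs to be
+        # discarded, rollback_conversion_workspace (below) restores the last
+        # committed snapshot.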
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def rollback_conversion_workspace( + self, + request: Optional[ + Union[clouddms.RollbackConversionWorkspaceRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Rolls back a conversion workspace to the last + committed snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest, dict]): + The request object. Request message for + 'RollbackConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.RollbackConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.RollbackConversionWorkspaceRequest): + request = clouddms.RollbackConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.rollback_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
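+ # `operation.from_gapic` (api_core) turns the raw operations_pb2.Operation
+ # into a future; a sketch of how a caller typically consumes it:
+ #
+ #   op = client.rollback_conversion_workspace(request=request)
+ #   workspace = op.result()   # blocks until done; a ConversionWorkspace
+ #   progress = op.metadata    # deserialized clouddms.OperationMetadata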
+ response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. + return response + + def apply_conversion_workspace( + self, + request: Optional[Union[clouddms.ApplyConversionWorkspaceRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operation.Operation: + r"""Applies draft tree onto a specific destination + database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest, dict]): + The request object. Request message for + 'ApplyConversionWorkspace' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.clouddms_v1.types.ConversionWorkspace` + The main conversion workspace resource entity. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.ApplyConversionWorkspaceRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.ApplyConversionWorkspaceRequest): + request = clouddms.ApplyConversionWorkspaceRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.apply_conversion_workspace + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + conversionworkspace_resources.ConversionWorkspace, + metadata_type=clouddms.OperationMetadata, + ) + + # Done; return the response. 
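+ # Taken together with the methods above, one plausible end-to-end flow
+ # for a workspace (a hedged sketch; resource names are placeholders, and
+ # seed_conversion_workspace may need further fields not shown here):
+ #
+ #   ws = "projects/p/locations/l/conversionWorkspaces/w"
+ #   client.seed_conversion_workspace(request={"name": ws}).result()
+ #   client.convert_conversion_workspace(request={"name": ws}).result()
+ #   client.apply_conversion_workspace(request={
+ #       "name": ws,
+ #       "connection_profile": "projects/p/locations/l/connectionProfiles/dest",
+ #   }).result()
+ #   client.commit_conversion_workspace(request={"name": ws}).result()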
+ return response + + def describe_database_entities( + self, + request: Optional[Union[clouddms.DescribeDatabaseEntitiesRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.DescribeDatabaseEntitiesPager: + r"""Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest, dict]): + The request object. Request message for + 'DescribeDatabaseEntities' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager: + Response message for + 'DescribeDatabaseEntities' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DescribeDatabaseEntitiesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.DescribeDatabaseEntitiesRequest): + request = clouddms.DescribeDatabaseEntitiesRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.describe_database_entities + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.DescribeDatabaseEntitiesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
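+ # The pager resolves additional pages transparently; both iteration
+ # styles below are supported (request as in the docstring sample):
+ #
+ #   for entity in client.describe_database_entities(request=request):
+ #       print(entity)         # DatabaseEntity items across all pages
+ #
+ #   pager = client.describe_database_entities(request=request)
+ #   for page in pager.pages:  # or walk the raw responses page by page
+ #       print(page.next_page_token)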
+ return response + + def search_background_jobs( + self, + request: Optional[Union[clouddms.SearchBackgroundJobsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.SearchBackgroundJobsResponse: + r"""Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.search_background_jobs(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest, dict]): + The request object. Request message for + 'SearchBackgroundJobs' request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse: + Response message for + 'SearchBackgroundJobs' request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.SearchBackgroundJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.SearchBackgroundJobsRequest): + request = clouddms.SearchBackgroundJobsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.search_background_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def describe_conversion_workspace_revisions( + self, + request: Optional[ + Union[clouddms.DescribeConversionWorkspaceRevisionsRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> clouddms.DescribeConversionWorkspaceRevisionsResponse: + r"""Retrieves a list of committed revisions of a specific + conversion workspace. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest, dict]): + The request object. Request message for + 'DescribeConversionWorkspaceRevisions' + request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse: + Response message for + 'DescribeConversionWorkspaceRevisions' + request. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.DescribeConversionWorkspaceRevisionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance( + request, clouddms.DescribeConversionWorkspaceRevisionsRequest + ): + request = clouddms.DescribeConversionWorkspaceRevisionsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.describe_conversion_workspace_revisions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("conversion_workspace", request.conversion_workspace),) + ), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def fetch_static_ips( + self, + request: Optional[Union[clouddms.FetchStaticIpsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.FetchStaticIpsPager: + r"""Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import clouddms_v1 + + def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.clouddms_v1.types.FetchStaticIpsRequest, dict]): + The request object. Request message for 'FetchStaticIps' + request. + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager: + Response message for a + 'FetchStaticIps' request. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a clouddms.FetchStaticIpsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, clouddms.FetchStaticIpsRequest): + request = clouddms.FetchStaticIpsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.fetch_static_ips] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FetchStaticIpsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataMigrationServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
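+ # Cancellation is best-effort (see the docstring above); a caller can
+ # poll afterwards to observe the outcome, e.g. (op_name is a placeholder):
+ #
+ #   client.cancel_operation({"name": op_name})
+ #   op = client.get_operation({"name": op_name})
+ #   # on success, op.done is True and op.error.code is typically CANCELLED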
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def set_iam_policy( + self, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
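+ # A minimal sketch of building the request with a protobuf Policy
+ # (role, member, and resource values are placeholders):
+ #
+ #   from google.iam.v1 import iam_policy_pb2, policy_pb2
+ #   policy = policy_pb2.Policy(bindings=[
+ #       policy_pb2.Binding(role="roles/viewer",
+ #                          members=["user:eve@example.com"]),
+ #   ])
+ #   client.set_iam_policy(iam_policy_pb2.SetIamPolicyRequest(
+ #       resource="projects/p/locations/l/migrationJobs/j", policy=policy))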
+ return response + + def get_iam_policy( + self, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + + **JSON Example** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
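+ # Reading the returned policy (the resource name is a placeholder):
+ #
+ #   policy = client.get_iam_policy({"resource": "projects/p/locations/l/migrationJobs/j"})
+ #   for binding in policy.bindings:
+ #       print(binding.role, list(binding.members))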
+ return response + + def test_iam_permissions( + self, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
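+ # Location names follow ``projects/*/locations/*``; a sketch (project id
+ # and region are placeholders):
+ #
+ #   loc = client.get_location({"name": "projects/my-project/locations/us-central1"})
+ #   print(loc.location_id, loc.display_name)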
+ return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/clouddms_v1/services/data_migration_service/pagers.py b/google/cloud/clouddms_v1/services/data_migration_service/pagers.py index ff6a717..63b5593 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/pagers.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/pagers.py @@ -24,7 +24,11 @@ Tuple, ) -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) class ListMigrationJobsPager: @@ -281,3 +285,517 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsPager: + """A pager for iterating through ``list_private_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``private_connections`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., clouddms.ListPrivateConnectionsResponse], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[clouddms_resources.PrivateConnection]: + for page in self.pages: + yield from page.private_connections + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListPrivateConnectionsAsyncPager: + """A pager for iterating through ``list_private_connections`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``private_connections`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListPrivateConnections`` requests and continue to iterate + through the ``private_connections`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.ListPrivateConnectionsResponse]], + request: clouddms.ListPrivateConnectionsRequest, + response: clouddms.ListPrivateConnectionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListPrivateConnectionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.ListPrivateConnectionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListPrivateConnectionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[clouddms_resources.PrivateConnection]: + async def async_generator(): + async for page in self.pages: + for response in page.private_connections: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``conversion_workspaces`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clouddms.ListConversionWorkspacesResponse], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.ConversionWorkspace]: + for page in self.pages: + yield from page.conversion_workspaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListConversionWorkspacesAsyncPager: + """A pager for iterating through ``list_conversion_workspaces`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``conversion_workspaces`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListConversionWorkspaces`` requests and continue to iterate + through the ``conversion_workspaces`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.ListConversionWorkspacesResponse]], + request: clouddms.ListConversionWorkspacesRequest, + response: clouddms.ListConversionWorkspacesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.ListConversionWorkspacesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.ListConversionWorkspacesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.ListConversionWorkspacesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__( + self, + ) -> AsyncIterator[conversionworkspace_resources.ConversionWorkspace]: + async def async_generator(): + async for page in self.pages: + for response in page.conversion_workspaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesPager: + """A pager for iterating through ``describe_database_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clouddms.DescribeDatabaseEntitiesResponse], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[conversionworkspace_resources.DatabaseEntity]: + for page in self.pages: + yield from page.database_entities + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class DescribeDatabaseEntitiesAsyncPager: + """A pager for iterating through ``describe_database_entities`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``database_entities`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``DescribeDatabaseEntities`` requests and continue to iterate + through the ``database_entities`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.DescribeDatabaseEntitiesResponse]], + request: clouddms.DescribeDatabaseEntitiesRequest, + response: clouddms.DescribeDatabaseEntitiesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.DescribeDatabaseEntitiesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.DescribeDatabaseEntitiesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[conversionworkspace_resources.DatabaseEntity]: + async def async_generator(): + async for page in self.pages: + for response in page.database_entities: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchStaticIpsPager: + """A pager for iterating through ``fetch_static_ips`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., clouddms.FetchStaticIpsResponse], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.static_ips + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FetchStaticIpsAsyncPager: + """A pager for iterating through ``fetch_static_ips`` requests. + + This class thinly wraps an initial + :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``static_ips`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FetchStaticIps`` requests and continue to iterate + through the ``static_ips`` field on the + corresponding responses. + + All the usual :class:`google.cloud.clouddms_v1.types.FetchStaticIpsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[clouddms.FetchStaticIpsResponse]], + request: clouddms.FetchStaticIpsRequest, + response: clouddms.FetchStaticIpsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.clouddms_v1.types.FetchStaticIpsRequest): + The initial request object. + response (google.cloud.clouddms_v1.types.FetchStaticIpsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = clouddms.FetchStaticIpsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[clouddms.FetchStaticIpsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.static_ips: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py b/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py index 8eb6c8c..f3bf91e 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/transports/base.py @@ -22,11 +22,18 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.clouddms_v1 import gapic_version as package_version -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ @@ -208,6 +215,101 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.create_private_connection: gapic_v1.method.wrap_method( + self.create_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_private_connection: gapic_v1.method.wrap_method( + self.get_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.list_private_connections: gapic_v1.method.wrap_method( + self.list_private_connections, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_private_connection: gapic_v1.method.wrap_method( + self.delete_private_connection, + default_timeout=60.0, + client_info=client_info, + ), + self.get_conversion_workspace: gapic_v1.method.wrap_method( + self.get_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.list_conversion_workspaces: gapic_v1.method.wrap_method( + self.list_conversion_workspaces, + default_timeout=60.0, + client_info=client_info, + ), + self.create_conversion_workspace: gapic_v1.method.wrap_method( + self.create_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.update_conversion_workspace: gapic_v1.method.wrap_method( + self.update_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_conversion_workspace: gapic_v1.method.wrap_method( + self.delete_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.seed_conversion_workspace: 
gapic_v1.method.wrap_method( + self.seed_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.import_mapping_rules: gapic_v1.method.wrap_method( + self.import_mapping_rules, + default_timeout=60.0, + client_info=client_info, + ), + self.convert_conversion_workspace: gapic_v1.method.wrap_method( + self.convert_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.commit_conversion_workspace: gapic_v1.method.wrap_method( + self.commit_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.rollback_conversion_workspace: gapic_v1.method.wrap_method( + self.rollback_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.apply_conversion_workspace: gapic_v1.method.wrap_method( + self.apply_conversion_workspace, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_database_entities: gapic_v1.method.wrap_method( + self.describe_database_entities, + default_timeout=60.0, + client_info=client_info, + ), + self.search_background_jobs: gapic_v1.method.wrap_method( + self.search_background_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.describe_conversion_workspace_revisions: gapic_v1.method.wrap_method( + self.describe_conversion_workspace_revisions, + default_timeout=60.0, + client_info=client_info, + ), + self.fetch_static_ips: gapic_v1.method.wrap_method( + self.fetch_static_ips, + default_timeout=60.0, + client_info=client_info, + ), } def close(self): @@ -388,6 +490,284 @@ def delete_connection_profile( ]: raise NotImplementedError() + @property + def create_private_connection( + self, + ) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_private_connection( + self, + ) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Union[ + clouddms_resources.PrivateConnection, + Awaitable[clouddms_resources.PrivateConnection], + ], + ]: + raise NotImplementedError() + + @property + def list_private_connections( + self, + ) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Union[ + clouddms.ListPrivateConnectionsResponse, + Awaitable[clouddms.ListPrivateConnectionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_private_connection( + self, + ) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_conversion_workspace( + self, + ) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Union[ + conversionworkspace_resources.ConversionWorkspace, + Awaitable[conversionworkspace_resources.ConversionWorkspace], + ], + ]: + raise NotImplementedError() + + @property + def list_conversion_workspaces( + self, + ) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Union[ + clouddms.ListConversionWorkspacesResponse, + Awaitable[clouddms.ListConversionWorkspacesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_conversion_workspace( + self, + ) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + 
raise NotImplementedError() + + @property + def delete_conversion_workspace( + self, + ) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def seed_conversion_workspace( + self, + ) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def import_mapping_rules( + self, + ) -> Callable[ + [clouddms.ImportMappingRulesRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def convert_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def commit_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def rollback_conversion_workspace( + self, + ) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def apply_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def describe_database_entities( + self, + ) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Union[ + clouddms.DescribeDatabaseEntitiesResponse, + Awaitable[clouddms.DescribeDatabaseEntitiesResponse], + ], + ]: + raise NotImplementedError() + + @property + def search_background_jobs( + self, + ) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Union[ + clouddms.SearchBackgroundJobsResponse, + Awaitable[clouddms.SearchBackgroundJobsResponse], + ], + ]: + raise NotImplementedError() + + @property + def describe_conversion_workspace_revisions( + self, + ) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Union[ + clouddms.DescribeConversionWorkspaceRevisionsResponse, + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def fetch_static_ips( + self, + ) -> Callable[ + [clouddms.FetchStaticIpsRequest], + Union[ + clouddms.FetchStaticIpsResponse, Awaitable[clouddms.FetchStaticIpsResponse] + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, 
Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py index 901808f..043ebc7 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc.py @@ -20,10 +20,17 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) from .base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport @@ -576,8 +583,8 @@ def list_connection_profiles( ]: r"""Return a callable for the list connection profiles method over gRPC. - Retrieve a list of all connection profiles in a given - project and location. + Retrieves a list of all connection profiles in a + given project and location. Returns: Callable[[~.ListConnectionProfilesRequest], @@ -708,9 +715,753 @@ def delete_connection_profile( ) return self._stubs["delete_connection_profile"] + @property + def create_private_connection( + self, + ) -> Callable[[clouddms.CreatePrivateConnectionRequest], operations_pb2.Operation]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. + + Returns: + Callable[[~.CreatePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
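+        # A hedged usage sketch (client and variable names are illustrative,
+        # not part of this diff): callers usually reach this RPC through the
+        # GAPIC client rather than the transport, then block on the returned
+        # long-running operation:
+        #
+        #   client = clouddms_v1.DataMigrationServiceClient()
+        #   operation = client.create_private_connection(request=request)
+        #   response = operation.result()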
+ if "create_private_connection" not in self._stubs: + self._stubs["create_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection", + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_private_connection"] + + @property + def get_private_connection( + self, + ) -> Callable[ + [clouddms.GetPrivateConnectionRequest], clouddms_resources.PrivateConnection + ]: + r"""Return a callable for the get private connection method over gRPC. + + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + ~.PrivateConnection]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_private_connection" not in self._stubs: + self._stubs["get_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection", + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs["get_private_connection"] + + @property + def list_private_connections( + self, + ) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + clouddms.ListPrivateConnectionsResponse, + ]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + ~.ListPrivateConnectionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_private_connections" not in self._stubs: + self._stubs["list_private_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections", + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs["list_private_connections"] + + @property + def delete_private_connection( + self, + ) -> Callable[[clouddms.DeletePrivateConnectionRequest], operations_pb2.Operation]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_private_connection" not in self._stubs: + self._stubs["delete_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection", + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_private_connection"] + + @property + def get_conversion_workspace( + self, + ) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + conversionworkspace_resources.ConversionWorkspace, + ]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. + + Returns: + Callable[[~.GetConversionWorkspaceRequest], + ~.ConversionWorkspace]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_conversion_workspace" not in self._stubs: + self._stubs["get_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace", + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs["get_conversion_workspace"] + + @property + def list_conversion_workspaces( + self, + ) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + clouddms.ListConversionWorkspacesResponse, + ]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. + + Returns: + Callable[[~.ListConversionWorkspacesRequest], + ~.ListConversionWorkspacesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_conversion_workspaces" not in self._stubs: + self._stubs["list_conversion_workspaces"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces", + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs["list_conversion_workspaces"] + + @property + def create_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create conversion workspace method over gRPC. + + Creates a new conversion workspace in a given project + and location. + + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_conversion_workspace" not in self._stubs: + self._stubs["create_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace", + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_conversion_workspace"] + + @property + def update_conversion_workspace( + self, + ) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_conversion_workspace" not in self._stubs: + self._stubs["update_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace", + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_conversion_workspace"] + + @property + def delete_conversion_workspace( + self, + ) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_conversion_workspace" not in self._stubs: + self._stubs["delete_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace", + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_conversion_workspace"] + + @property + def seed_conversion_workspace( + self, + ) -> Callable[[clouddms.SeedConversionWorkspaceRequest], operations_pb2.Operation]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. + + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "seed_conversion_workspace" not in self._stubs: + self._stubs["seed_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace", + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["seed_conversion_workspace"] + + @property + def import_mapping_rules( + self, + ) -> Callable[[clouddms.ImportMappingRulesRequest], operations_pb2.Operation]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + Returns: + Callable[[~.ImportMappingRulesRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_mapping_rules" not in self._stubs: + self._stubs["import_mapping_rules"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules", + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_mapping_rules"] + + @property + def convert_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. + + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "convert_conversion_workspace" not in self._stubs: + self._stubs["convert_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace", + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["convert_conversion_workspace"] + + @property + def commit_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the commit conversion workspace method over gRPC. + + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "commit_conversion_workspace" not in self._stubs: + self._stubs["commit_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace", + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["commit_conversion_workspace"] + + @property + def rollback_conversion_workspace( + self, + ) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], operations_pb2.Operation + ]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. + + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_conversion_workspace" not in self._stubs: + self._stubs[ + "rollback_conversion_workspace" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace", + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_conversion_workspace"] + + @property + def apply_conversion_workspace( + self, + ) -> Callable[[clouddms.ApplyConversionWorkspaceRequest], operations_pb2.Operation]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. + + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_conversion_workspace" not in self._stubs: + self._stubs["apply_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace", + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["apply_conversion_workspace"] + + @property + def describe_database_entities( + self, + ) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + clouddms.DescribeDatabaseEntitiesResponse, + ]: + r"""Return a callable for the describe database entities method over gRPC. + + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + ~.DescribeDatabaseEntitiesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "describe_database_entities" not in self._stubs: + self._stubs["describe_database_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities", + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs["describe_database_entities"] + + @property + def search_background_jobs( + self, + ) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], clouddms.SearchBackgroundJobsResponse + ]: + r"""Return a callable for the search background jobs method over gRPC. + + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + ~.SearchBackgroundJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_background_jobs" not in self._stubs: + self._stubs["search_background_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs", + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs["search_background_jobs"] + + @property + def describe_conversion_workspace_revisions( + self, + ) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + clouddms.DescribeConversionWorkspaceRevisionsResponse, + ]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. + + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + ~.DescribeConversionWorkspaceRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "describe_conversion_workspace_revisions" not in self._stubs: + self._stubs[ + "describe_conversion_workspace_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions", + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs["describe_conversion_workspace_revisions"] + + @property + def fetch_static_ips( + self, + ) -> Callable[[clouddms.FetchStaticIpsRequest], clouddms.FetchStaticIpsResponse]: + r"""Return a callable for the fetch static ips method over gRPC. + + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + Returns: + Callable[[~.FetchStaticIpsRequest], + ~.FetchStaticIpsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_static_ips" not in self._stubs: + self._stubs["fetch_static_ips"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps", + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs["fetch_static_ips"] + def close(self): self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
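+        # This stub backs the operations mixin on the generated client; a
+        # hedged example (resource name illustrative):
+        #
+        #   client.list_operations(
+        #       operations_pb2.ListOperationsRequest(name="projects/p/locations/l")
+        #   )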
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + @property def kind(self) -> str: return "grpc" diff --git a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py index 1942a50..376c302 100644 --- a/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py +++ b/google/cloud/clouddms_v1/services/data_migration_service/transports/grpc_asyncio.py @@ -19,11 +19,18 @@ from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) from .base import DEFAULT_CLIENT_INFO, DataMigrationServiceTransport from .grpc import DataMigrationServiceGrpcTransport @@ -602,8 +609,8 @@ def list_connection_profiles( ]: r"""Return a callable for the list connection profiles method over gRPC. - Retrieve a list of all connection profiles in a given - project and location. + Retrieves a list of all connection profiles in a + given project and location. Returns: Callable[[~.ListConnectionProfilesRequest], @@ -741,8 +748,768 @@ def delete_connection_profile( ) return self._stubs["delete_connection_profile"] + @property + def create_private_connection( + self, + ) -> Callable[ + [clouddms.CreatePrivateConnectionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create private connection method over gRPC. + + Creates a new private connection in a given project + and location. 
+ + Returns: + Callable[[~.CreatePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_private_connection" not in self._stubs: + self._stubs["create_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreatePrivateConnection", + request_serializer=clouddms.CreatePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_private_connection"] + + @property + def get_private_connection( + self, + ) -> Callable[ + [clouddms.GetPrivateConnectionRequest], + Awaitable[clouddms_resources.PrivateConnection], + ]: + r"""Return a callable for the get private connection method over gRPC. + + Gets details of a single private connection. + + Returns: + Callable[[~.GetPrivateConnectionRequest], + Awaitable[~.PrivateConnection]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_private_connection" not in self._stubs: + self._stubs["get_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetPrivateConnection", + request_serializer=clouddms.GetPrivateConnectionRequest.serialize, + response_deserializer=clouddms_resources.PrivateConnection.deserialize, + ) + return self._stubs["get_private_connection"] + + @property + def list_private_connections( + self, + ) -> Callable[ + [clouddms.ListPrivateConnectionsRequest], + Awaitable[clouddms.ListPrivateConnectionsResponse], + ]: + r"""Return a callable for the list private connections method over gRPC. + + Retrieves a list of private connections in a given + project and location. + + Returns: + Callable[[~.ListPrivateConnectionsRequest], + Awaitable[~.ListPrivateConnectionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_private_connections" not in self._stubs: + self._stubs["list_private_connections"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListPrivateConnections", + request_serializer=clouddms.ListPrivateConnectionsRequest.serialize, + response_deserializer=clouddms.ListPrivateConnectionsResponse.deserialize, + ) + return self._stubs["list_private_connections"] + + @property + def delete_private_connection( + self, + ) -> Callable[ + [clouddms.DeletePrivateConnectionRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete private connection method over gRPC. + + Deletes a single Database Migration Service private + connection. + + Returns: + Callable[[~.DeletePrivateConnectionRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_private_connection" not in self._stubs: + self._stubs["delete_private_connection"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeletePrivateConnection", + request_serializer=clouddms.DeletePrivateConnectionRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_private_connection"] + + @property + def get_conversion_workspace( + self, + ) -> Callable[ + [clouddms.GetConversionWorkspaceRequest], + Awaitable[conversionworkspace_resources.ConversionWorkspace], + ]: + r"""Return a callable for the get conversion workspace method over gRPC. + + Gets details of a single conversion workspace. + + Returns: + Callable[[~.GetConversionWorkspaceRequest], + Awaitable[~.ConversionWorkspace]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_conversion_workspace" not in self._stubs: + self._stubs["get_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/GetConversionWorkspace", + request_serializer=clouddms.GetConversionWorkspaceRequest.serialize, + response_deserializer=conversionworkspace_resources.ConversionWorkspace.deserialize, + ) + return self._stubs["get_conversion_workspace"] + + @property + def list_conversion_workspaces( + self, + ) -> Callable[ + [clouddms.ListConversionWorkspacesRequest], + Awaitable[clouddms.ListConversionWorkspacesResponse], + ]: + r"""Return a callable for the list conversion workspaces method over gRPC. + + Lists conversion workspaces in a given project and + location. + + Returns: + Callable[[~.ListConversionWorkspacesRequest], + Awaitable[~.ListConversionWorkspacesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_conversion_workspaces" not in self._stubs: + self._stubs["list_conversion_workspaces"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ListConversionWorkspaces", + request_serializer=clouddms.ListConversionWorkspacesRequest.serialize, + response_deserializer=clouddms.ListConversionWorkspacesResponse.deserialize, + ) + return self._stubs["list_conversion_workspaces"] + + @property + def create_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CreateConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create conversion workspace method over gRPC. + + Creates a new conversion workspace in a given project + and location. + + Returns: + Callable[[~.CreateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
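+        # A hedged async usage sketch (names illustrative): with the async
+        # client, both the RPC and the operation result are awaited:
+        #
+        #   client = clouddms_v1.DataMigrationServiceAsyncClient()
+        #   operation = await client.create_conversion_workspace(request=request)
+        #   response = await operation.result()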
+ if "create_conversion_workspace" not in self._stubs: + self._stubs["create_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CreateConversionWorkspace", + request_serializer=clouddms.CreateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_conversion_workspace"] + + @property + def update_conversion_workspace( + self, + ) -> Callable[ + [clouddms.UpdateConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update conversion workspace method over gRPC. + + Updates the parameters of a single conversion + workspace. + + Returns: + Callable[[~.UpdateConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_conversion_workspace" not in self._stubs: + self._stubs["update_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/UpdateConversionWorkspace", + request_serializer=clouddms.UpdateConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_conversion_workspace"] + + @property + def delete_conversion_workspace( + self, + ) -> Callable[ + [clouddms.DeleteConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete conversion workspace method over gRPC. + + Deletes a single conversion workspace. + + Returns: + Callable[[~.DeleteConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_conversion_workspace" not in self._stubs: + self._stubs["delete_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DeleteConversionWorkspace", + request_serializer=clouddms.DeleteConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_conversion_workspace"] + + @property + def seed_conversion_workspace( + self, + ) -> Callable[ + [clouddms.SeedConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the seed conversion workspace method over gRPC. + + Imports a snapshot of the source database into the + conversion workspace. + + Returns: + Callable[[~.SeedConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "seed_conversion_workspace" not in self._stubs: + self._stubs["seed_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SeedConversionWorkspace", + request_serializer=clouddms.SeedConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["seed_conversion_workspace"] + + @property + def import_mapping_rules( + self, + ) -> Callable[ + [clouddms.ImportMappingRulesRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the import mapping rules method over gRPC. + + Imports the mapping rules for a given conversion + workspace. Supports various formats of external rules + files. + + Returns: + Callable[[~.ImportMappingRulesRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "import_mapping_rules" not in self._stubs: + self._stubs["import_mapping_rules"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ImportMappingRules", + request_serializer=clouddms.ImportMappingRulesRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["import_mapping_rules"] + + @property + def convert_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ConvertConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the convert conversion workspace method over gRPC. + + Creates a draft tree schema for the destination + database. + + Returns: + Callable[[~.ConvertConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "convert_conversion_workspace" not in self._stubs: + self._stubs["convert_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ConvertConversionWorkspace", + request_serializer=clouddms.ConvertConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["convert_conversion_workspace"] + + @property + def commit_conversion_workspace( + self, + ) -> Callable[ + [clouddms.CommitConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the commit conversion workspace method over gRPC. + + Marks all the data in the conversion workspace as + committed. + + Returns: + Callable[[~.CommitConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "commit_conversion_workspace" not in self._stubs: + self._stubs["commit_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/CommitConversionWorkspace", + request_serializer=clouddms.CommitConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["commit_conversion_workspace"] + + @property + def rollback_conversion_workspace( + self, + ) -> Callable[ + [clouddms.RollbackConversionWorkspaceRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the rollback conversion workspace method over gRPC. + + Rolls back a conversion workspace to the last + committed snapshot. + + Returns: + Callable[[~.RollbackConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_conversion_workspace" not in self._stubs: + self._stubs[ + "rollback_conversion_workspace" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/RollbackConversionWorkspace", + request_serializer=clouddms.RollbackConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["rollback_conversion_workspace"] + + @property + def apply_conversion_workspace( + self, + ) -> Callable[ + [clouddms.ApplyConversionWorkspaceRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the apply conversion workspace method over gRPC. + + Applies draft tree onto a specific destination + database. + + Returns: + Callable[[~.ApplyConversionWorkspaceRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "apply_conversion_workspace" not in self._stubs: + self._stubs["apply_conversion_workspace"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/ApplyConversionWorkspace", + request_serializer=clouddms.ApplyConversionWorkspaceRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["apply_conversion_workspace"] + + @property + def describe_database_entities( + self, + ) -> Callable[ + [clouddms.DescribeDatabaseEntitiesRequest], + Awaitable[clouddms.DescribeDatabaseEntitiesResponse], + ]: + r"""Return a callable for the describe database entities method over gRPC. + + Describes the database entities tree for a specific + conversion workspace and a specific tree type. + + Database entities are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are simple data + objects describing the structure of the client database. + + Returns: + Callable[[~.DescribeDatabaseEntitiesRequest], + Awaitable[~.DescribeDatabaseEntitiesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "describe_database_entities" not in self._stubs: + self._stubs["describe_database_entities"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeDatabaseEntities", + request_serializer=clouddms.DescribeDatabaseEntitiesRequest.serialize, + response_deserializer=clouddms.DescribeDatabaseEntitiesResponse.deserialize, + ) + return self._stubs["describe_database_entities"] + + @property + def search_background_jobs( + self, + ) -> Callable[ + [clouddms.SearchBackgroundJobsRequest], + Awaitable[clouddms.SearchBackgroundJobsResponse], + ]: + r"""Return a callable for the search background jobs method over gRPC. + + Searches/lists the background jobs for a specific + conversion workspace. + + The background jobs are not resources like conversion + workspaces or mapping rules, and they can't be created, + updated or deleted. Instead, they are a way to expose + the data plane jobs log. + + Returns: + Callable[[~.SearchBackgroundJobsRequest], + Awaitable[~.SearchBackgroundJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_background_jobs" not in self._stubs: + self._stubs["search_background_jobs"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/SearchBackgroundJobs", + request_serializer=clouddms.SearchBackgroundJobsRequest.serialize, + response_deserializer=clouddms.SearchBackgroundJobsResponse.deserialize, + ) + return self._stubs["search_background_jobs"] + + @property + def describe_conversion_workspace_revisions( + self, + ) -> Callable[ + [clouddms.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[clouddms.DescribeConversionWorkspaceRevisionsResponse], + ]: + r"""Return a callable for the describe conversion workspace + revisions method over gRPC. + + Retrieves a list of committed revisions of a specific + conversion workspace. + + Returns: + Callable[[~.DescribeConversionWorkspaceRevisionsRequest], + Awaitable[~.DescribeConversionWorkspaceRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "describe_conversion_workspace_revisions" not in self._stubs: + self._stubs[ + "describe_conversion_workspace_revisions" + ] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/DescribeConversionWorkspaceRevisions", + request_serializer=clouddms.DescribeConversionWorkspaceRevisionsRequest.serialize, + response_deserializer=clouddms.DescribeConversionWorkspaceRevisionsResponse.deserialize, + ) + return self._stubs["describe_conversion_workspace_revisions"] + + @property + def fetch_static_ips( + self, + ) -> Callable[ + [clouddms.FetchStaticIpsRequest], Awaitable[clouddms.FetchStaticIpsResponse] + ]: + r"""Return a callable for the fetch static ips method over gRPC. + + Fetches a set of static IP addresses that need to be + allowlisted by the customer when using the static-IP + connectivity method. + + Returns: + Callable[[~.FetchStaticIpsRequest], + Awaitable[~.FetchStaticIpsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "fetch_static_ips" not in self._stubs: + self._stubs["fetch_static_ips"] = self.grpc_channel.unary_unary( + "/google.cloud.clouddms.v1.DataMigrationService/FetchStaticIps", + request_serializer=clouddms.FetchStaticIpsRequest.serialize, + response_deserializer=clouddms.FetchStaticIpsResponse.deserialize, + ) + return self._stubs["fetch_static_ips"] + def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
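Once created, the operations stubs above are plain awaitable callables, which is all the long-running-operations mixins need. A small sketch of polling an operation's status directly through an instance of this transport; the transport variable is assumed to be an already-configured DataMigrationServiceGrpcAsyncIOTransport.

from google.longrunning import operations_pb2


async def operation_done(transport, operation_name: str) -> bool:
    # Property access returns the cached stub; calling it with a request
    # proto yields an awaitable gRPC call.
    request = operations_pb2.GetOperationRequest(name=operation_name)
    operation = await transport.get_operation(request)
    return operation.done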
+        if "list_operations" not in self._stubs:
+            self._stubs["list_operations"] = self.grpc_channel.unary_unary(
+                "/google.longrunning.Operations/ListOperations",
+                request_serializer=operations_pb2.ListOperationsRequest.SerializeToString,
+                response_deserializer=operations_pb2.ListOperationsResponse.FromString,
+            )
+        return self._stubs["list_operations"]
+
+    @property
+    def list_locations(
+        self,
+    ) -> Callable[
+        [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse
+    ]:
+        r"""Return a callable for the list locations method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "list_locations" not in self._stubs:
+            self._stubs["list_locations"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/ListLocations",
+                request_serializer=locations_pb2.ListLocationsRequest.SerializeToString,
+                response_deserializer=locations_pb2.ListLocationsResponse.FromString,
+            )
+        return self._stubs["list_locations"]
+
+    @property
+    def get_location(
+        self,
+    ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]:
+        r"""Return a callable for the get location method over gRPC."""
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "get_location" not in self._stubs:
+            self._stubs["get_location"] = self.grpc_channel.unary_unary(
+                "/google.cloud.location.Locations/GetLocation",
+                request_serializer=locations_pb2.GetLocationRequest.SerializeToString,
+                response_deserializer=locations_pb2.Location.FromString,
+            )
+        return self._stubs["get_location"]
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the set iam policy method over gRPC.
+
+        Sets the IAM access control policy on the specified
+        resource. Replaces any existing policy.
+
+        Returns:
+            Callable[[~.SetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "set_iam_policy" not in self._stubs:
+            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/SetIamPolicy",
+                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["set_iam_policy"]
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
+        r"""Return a callable for the get iam policy method over gRPC.
+
+        Gets the IAM access control policy for a resource.
+        Returns an empty policy if the resource exists and does
+        not have a policy set.
+
+        Returns:
+            Callable[[~.GetIamPolicyRequest],
+                    ~.Policy]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
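The IAM mixin stubs defined here and below behave the same way. A hedged sketch of checking the caller's effective permissions through this transport; the permission string is an assumption for illustration, not a value taken from this patch.

from google.iam.v1 import iam_policy_pb2


async def allowed_permissions(transport, resource: str) -> list:
    request = iam_policy_pb2.TestIamPermissionsRequest(
        resource=resource,
        permissions=["datamigration.migrationjobs.get"],  # assumed permission name
    )
    response = await transport.test_iam_permissions(request)
    # The server echoes back only the permissions the caller actually holds.
    return list(response.permissions)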
+        if "get_iam_policy" not in self._stubs:
+            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/GetIamPolicy",
+                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
+                response_deserializer=policy_pb2.Policy.FromString,
+            )
+        return self._stubs["get_iam_policy"]
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        iam_policy_pb2.TestIamPermissionsResponse,
+    ]:
+        r"""Return a callable for the test iam permissions method over gRPC.
+
+        Tests the specified permissions against the IAM access control
+        policy for a resource. If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+
+        Returns:
+            Callable[[~.TestIamPermissionsRequest],
+                    ~.TestIamPermissionsResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if "test_iam_permissions" not in self._stubs:
+            self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
+                "/google.iam.v1.IAMPolicy/TestIamPermissions",
+                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
+                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
+            )
+        return self._stubs["test_iam_permissions"]
+

 __all__ = ("DataMigrationServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/clouddms_v1/types/__init__.py b/google/cloud/clouddms_v1/types/__init__.py
index cb3b44b..284b203 100644
--- a/google/cloud/clouddms_v1/types/__init__.py
+++ b/google/cloud/clouddms_v1/types/__init__.py
@@ -14,87 +14,203 @@
 # limitations under the License.
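The hunk below widens the types package surface so the new Oracle-to-PostgreSQL request and resource messages are importable from the public namespace. A hedged sketch of what that enables, assuming the ConversionWorkspace message carries the source/destination DatabaseEngineInfo pair introduced by this change; all field values are placeholders.

from google.cloud import clouddms_v1

request = clouddms_v1.CreateConversionWorkspaceRequest(
    parent="projects/my-project/locations/us-central1",
    conversion_workspace_id="my-workspace",
    conversion_workspace=clouddms_v1.ConversionWorkspace(
        source=clouddms_v1.DatabaseEngineInfo(
            engine=clouddms_v1.DatabaseEngine.ORACLE, version="19"
        ),
        destination=clouddms_v1.DatabaseEngineInfo(
            engine=clouddms_v1.DatabaseEngine.POSTGRESQL, version="14"
        ),
    ),
)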
# from .clouddms import ( + ApplyConversionWorkspaceRequest, + CommitConversionWorkspaceRequest, + ConvertConversionWorkspaceRequest, CreateConnectionProfileRequest, + CreateConversionWorkspaceRequest, CreateMigrationJobRequest, + CreatePrivateConnectionRequest, DeleteConnectionProfileRequest, + DeleteConversionWorkspaceRequest, DeleteMigrationJobRequest, + DeletePrivateConnectionRequest, + DescribeConversionWorkspaceRevisionsRequest, + DescribeConversionWorkspaceRevisionsResponse, + DescribeDatabaseEntitiesRequest, + DescribeDatabaseEntitiesResponse, + FetchStaticIpsRequest, + FetchStaticIpsResponse, GenerateSshScriptRequest, GetConnectionProfileRequest, + GetConversionWorkspaceRequest, GetMigrationJobRequest, + GetPrivateConnectionRequest, + ImportMappingRulesRequest, ListConnectionProfilesRequest, ListConnectionProfilesResponse, + ListConversionWorkspacesRequest, + ListConversionWorkspacesResponse, ListMigrationJobsRequest, ListMigrationJobsResponse, + ListPrivateConnectionsRequest, + ListPrivateConnectionsResponse, OperationMetadata, PromoteMigrationJobRequest, RestartMigrationJobRequest, ResumeMigrationJobRequest, + RollbackConversionWorkspaceRequest, + SearchBackgroundJobsRequest, + SearchBackgroundJobsResponse, + SeedConversionWorkspaceRequest, SshScript, StartMigrationJobRequest, StopMigrationJobRequest, UpdateConnectionProfileRequest, + UpdateConversionWorkspaceRequest, UpdateMigrationJobRequest, VerifyMigrationJobRequest, VmCreationConfig, VmSelectionConfig, ) from .clouddms_resources import ( + AlloyDbConnectionProfile, + AlloyDbSettings, CloudSqlConnectionProfile, CloudSqlSettings, ConnectionProfile, + ConversionWorkspaceInfo, DatabaseEngine, DatabaseProvider, DatabaseType, + ForwardSshTunnelConnectivity, MigrationJob, MigrationJobVerificationError, MySqlConnectionProfile, + NetworkArchitecture, + OracleConnectionProfile, PostgreSqlConnectionProfile, + PrivateConnection, + PrivateConnectivity, + PrivateServiceConnectConnectivity, ReverseSshConnectivity, SqlAclEntry, SqlIpConfig, SslConfig, StaticIpConnectivity, + StaticServiceIpConnectivity, + VpcPeeringConfig, VpcPeeringConnectivity, ) +from .conversionworkspace_resources import ( + BackgroundJobLogEntry, + BackgroundJobType, + ColumnEntity, + ConstraintEntity, + ConversionWorkspace, + DatabaseEngineInfo, + DatabaseEntity, + DatabaseEntityType, + EntityMapping, + EntityMappingLogEntry, + FunctionEntity, + ImportRulesFileFormat, + IndexEntity, + PackageEntity, + SchemaEntity, + SequenceEntity, + StoredProcedureEntity, + SynonymEntity, + TableEntity, + TriggerEntity, + ViewEntity, +) __all__ = ( + "ApplyConversionWorkspaceRequest", + "CommitConversionWorkspaceRequest", + "ConvertConversionWorkspaceRequest", "CreateConnectionProfileRequest", + "CreateConversionWorkspaceRequest", "CreateMigrationJobRequest", + "CreatePrivateConnectionRequest", "DeleteConnectionProfileRequest", + "DeleteConversionWorkspaceRequest", "DeleteMigrationJobRequest", + "DeletePrivateConnectionRequest", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", "GenerateSshScriptRequest", "GetConnectionProfileRequest", + "GetConversionWorkspaceRequest", "GetMigrationJobRequest", + "GetPrivateConnectionRequest", + "ImportMappingRulesRequest", "ListConnectionProfilesRequest", "ListConnectionProfilesResponse", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", 
"ListMigrationJobsRequest", "ListMigrationJobsResponse", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", "OperationMetadata", "PromoteMigrationJobRequest", "RestartMigrationJobRequest", "ResumeMigrationJobRequest", + "RollbackConversionWorkspaceRequest", + "SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "SeedConversionWorkspaceRequest", "SshScript", "StartMigrationJobRequest", "StopMigrationJobRequest", "UpdateConnectionProfileRequest", + "UpdateConversionWorkspaceRequest", "UpdateMigrationJobRequest", "VerifyMigrationJobRequest", "VmCreationConfig", "VmSelectionConfig", + "AlloyDbConnectionProfile", + "AlloyDbSettings", "CloudSqlConnectionProfile", "CloudSqlSettings", "ConnectionProfile", + "ConversionWorkspaceInfo", "DatabaseType", + "ForwardSshTunnelConnectivity", "MigrationJob", "MigrationJobVerificationError", "MySqlConnectionProfile", + "OracleConnectionProfile", "PostgreSqlConnectionProfile", + "PrivateConnection", + "PrivateConnectivity", + "PrivateServiceConnectConnectivity", "ReverseSshConnectivity", "SqlAclEntry", "SqlIpConfig", "SslConfig", "StaticIpConnectivity", + "StaticServiceIpConnectivity", + "VpcPeeringConfig", "VpcPeeringConnectivity", "DatabaseEngine", "DatabaseProvider", + "NetworkArchitecture", + "BackgroundJobLogEntry", + "ColumnEntity", + "ConstraintEntity", + "ConversionWorkspace", + "DatabaseEngineInfo", + "DatabaseEntity", + "EntityMapping", + "EntityMappingLogEntry", + "FunctionEntity", + "IndexEntity", + "PackageEntity", + "SchemaEntity", + "SequenceEntity", + "StoredProcedureEntity", + "SynonymEntity", + "TableEntity", + "TriggerEntity", + "ViewEntity", + "BackgroundJobType", + "DatabaseEntityType", + "ImportRulesFileFormat", ) diff --git a/google/cloud/clouddms_v1/types/clouddms.py b/google/cloud/clouddms_v1/types/clouddms.py index 089aa49..9cf97e1 100644 --- a/google/cloud/clouddms_v1/types/clouddms.py +++ b/google/cloud/clouddms_v1/types/clouddms.py @@ -21,7 +21,10 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore -from google.cloud.clouddms_v1.types import clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms_resources, + conversionworkspace_resources, +) __protobuf__ = proto.module( package="google.cloud.clouddms.v1", @@ -48,25 +51,50 @@ "CreateConnectionProfileRequest", "UpdateConnectionProfileRequest", "DeleteConnectionProfileRequest", + "CreatePrivateConnectionRequest", + "ListPrivateConnectionsRequest", + "ListPrivateConnectionsResponse", + "DeletePrivateConnectionRequest", + "GetPrivateConnectionRequest", "OperationMetadata", + "ListConversionWorkspacesRequest", + "ListConversionWorkspacesResponse", + "GetConversionWorkspaceRequest", + "CreateConversionWorkspaceRequest", + "UpdateConversionWorkspaceRequest", + "DeleteConversionWorkspaceRequest", + "CommitConversionWorkspaceRequest", + "RollbackConversionWorkspaceRequest", + "ApplyConversionWorkspaceRequest", + "SeedConversionWorkspaceRequest", + "ConvertConversionWorkspaceRequest", + "ImportMappingRulesRequest", + "DescribeDatabaseEntitiesRequest", + "DescribeDatabaseEntitiesResponse", + "SearchBackgroundJobsRequest", + "SearchBackgroundJobsResponse", + "DescribeConversionWorkspaceRevisionsRequest", + "DescribeConversionWorkspaceRevisionsResponse", + "FetchStaticIpsRequest", + "FetchStaticIpsResponse", }, ) class ListMigrationJobsRequest(proto.Message): - r"""Retrieve a list of all migration jobs in a given project and + r"""Retrieves a list of all migration jobs in a given project and location. 
     Attributes:
         parent (str):
-            Required. The parent, which owns this
+            Required. The parent which owns this
             collection of migrationJobs.
         page_size (int):
             The maximum number of migration jobs to
             return. The service may return fewer than this
             value. If unspecified, at most 50 migration
             jobs will be returned. The maximum value is 1000;
-            values above 1000 will be coerced to 1000.
+            values above 1000 are coerced to 1000.
         page_token (str):
             The nextPageToken value received in the
             previous call to migrationJobs.list, used in the
@@ -123,7 +151,7 @@ class ListMigrationJobsResponse(proto.Message):
         migration_jobs (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob]):
             The list of migration jobs objects.
         next_page_token (str):
-            A token, which can be sent as ``page_token`` to retrieve the
+            A token which can be sent as ``page_token`` to retrieve the
             next page. If this field is omitted, there are no subsequent
             pages.
         unreachable (MutableSequence[str]):
@@ -172,7 +200,7 @@ class CreateMigrationJobRequest(proto.Message):
 
     Attributes:
         parent (str):
-            Required. The parent, which owns this
+            Required. The parent which owns this
             collection of migration jobs.
         migration_job_id (str):
             Required. The ID of the instance to create.
@@ -181,13 +209,13 @@
             job `__ object.
         request_id (str):
-            A unique id used to identify the request. If the server
-            receives two requests with the same id, then the second
-            request will be ignored.
+            A unique ID used to identify the request. If the server
+            receives two requests with the same ID, then the second
+            request is ignored.
 
             It is recommended to always set this value to a UUID.
 
-            The id must contain only letters (a-z, A-Z), numbers (0-9),
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
             underscores (_), and hyphens (-). The maximum length is 40
             characters.
     """
@@ -217,19 +245,19 @@
 
     Attributes:
         update_mask (google.protobuf.field_mask_pb2.FieldMask):
             Required. Field mask is used to specify the
-            fields to be overwritten in the migration job
-            resource by the update.
+            fields to be overwritten by the update in the
+            migration job resource.
         migration_job (google.cloud.clouddms_v1.types.MigrationJob):
             Required. The migration job parameters to
             update.
         request_id (str):
-            A unique id used to identify the request. If the server
-            receives two requests with the same id, then the second
-            request will be ignored.
+            A unique ID used to identify the request. If the server
+            receives two requests with the same ID, then the second
+            request is ignored.
 
             It is recommended to always set this value to a UUID.
 
-            The id must contain only letters (a-z, A-Z), numbers (0-9),
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
             underscores (_), and hyphens (-). The maximum length is 40
             characters.
     """
@@ -258,13 +286,13 @@
             Required. Name of the migration job resource
             to delete.
         request_id (str):
-            A unique id used to identify the request. If the server
-            receives two requests with the same id, then the second
-            request will be ignored.
+            A unique ID used to identify the request. If the server
+            receives two requests with the same ID, then the second
+            request is ignored.
 
             It is recommended to always set this value to a UUID.
 
-            The id must contain only letters (a-z, A-Z), numbers (0-9),
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
             underscores (_), and hyphens (-). The maximum length is 40
             characters.
force (bool): @@ -401,7 +429,7 @@ class GenerateSshScriptRequest(proto.Message): This field is a member of `oneof`_ ``vm_config``. vm_port (int): The port that will be open on the bastion - host + host. """ migration_job: str = proto.Field( @@ -492,14 +520,14 @@ class ListConnectionProfilesRequest(proto.Message): Attributes: parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. page_size (int): The maximum number of connection profiles to return. The service may return fewer than this value. If unspecified, at most 50 connection profiles will be returned. The maximum value is - 1000; values above 1000 will be coerced to 1000. + 1000; values above 1000 are coerced to 1000. page_token (str): A page token, received from a previous ``ListConnectionProfiles`` call. Provide this to retrieve @@ -521,7 +549,8 @@ class ListConnectionProfilesRequest(proto.Message): %lt;my_username%gt;** to list all connection profiles configured to connect with a specific username. order_by (str): - the order by fields for the result. + A comma-separated list of fields to order + results according to. """ parent: str = proto.Field( @@ -553,7 +582,7 @@ class ListConnectionProfilesResponse(proto.Message): connection_profiles (MutableSequence[google.cloud.clouddms_v1.types.ConnectionProfile]): The response list of connection profiles. next_page_token (str): - A token, which can be sent as ``page_token`` to retrieve the + A token which can be sent as ``page_token`` to retrieve the next page. If this field is omitted, there are no subsequent pages. unreachable (MutableSequence[str]): @@ -601,7 +630,7 @@ class CreateConnectionProfileRequest(proto.Message): Attributes: parent (str): - Required. The parent, which owns this + Required. The parent which owns this collection of connection profiles. connection_profile_id (str): Required. The connection profile identifier. @@ -609,15 +638,24 @@ class CreateConnectionProfileRequest(proto.Message): Required. The create request body including the connection profile data request_id (str): - A unique id used to identify the request. If the server - receives two requests with the same id, then the second - request will be ignored. + Optional. A unique ID used to identify the request. If the + server receives two requests with the same ID, then the + second request is ignored. It is recommended to always set this value to a UUID. - The id must contain only letters (a-z, A-Z), numbers (0-9), + The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), and hyphens (-). The maximum length is 40 characters. + validate_only (bool): + Optional. Only validate the connection + profile, but don't create any resources. The + default is false. Only supported for Oracle + connection profiles. + skip_validation (bool): + Optional. Create the connection profile + without validating it. The default is false. + Only supported for Oracle connection profiles. """ parent: str = proto.Field( @@ -637,6 +675,14 @@ class CreateConnectionProfileRequest(proto.Message): proto.STRING, number=4, ) + validate_only: bool = proto.Field( + proto.BOOL, + number=5, + ) + skip_validation: bool = proto.Field( + proto.BOOL, + number=6, + ) class UpdateConnectionProfileRequest(proto.Message): @@ -645,21 +691,30 @@ class UpdateConnectionProfileRequest(proto.Message): Attributes: update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. 
Field mask is used to specify the
-            fields to be overwritten in the connection
-            profile resource by the update.
+            fields to be overwritten by the update in the
+            connection profile resource.
         connection_profile (google.cloud.clouddms_v1.types.ConnectionProfile):
             Required. The connection profile parameters
             to update.
         request_id (str):
-            A unique id used to identify the request. If the server
-            receives two requests with the same id, then the second
-            request will be ignored.
+            Optional. A unique ID used to identify the request. If the
+            server receives two requests with the same ID, then the
+            second request is ignored.
 
             It is recommended to always set this value to a UUID.
 
-            The id must contain only letters (a-z, A-Z), numbers (0-9),
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
             underscores (_), and hyphens (-). The maximum length is 40
             characters.
+        validate_only (bool):
+            Optional. Only validate the connection
+            profile, but don't update any resources. The
+            default is false. Only supported for Oracle
+            connection profiles.
+        skip_validation (bool):
+            Optional. Update the connection profile
+            without validating it. The default is false.
+            Only supported for Oracle connection profiles.
     """
 
     update_mask: field_mask_pb2.FieldMask = proto.Field(
@@ -676,6 +731,14 @@
         proto.STRING,
         number=3,
     )
+    validate_only: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    skip_validation: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
 
 
 class DeleteConnectionProfileRequest(proto.Message):
@@ -686,13 +749,13 @@
             Required. Name of the connection profile
             resource to delete.
         request_id (str):
-            A unique id used to identify the request. If the server
-            receives two requests with the same id, then the second
-            request will be ignored.
+            A unique ID used to identify the request. If the server
+            receives two requests with the same ID, then the second
+            request is ignored.
 
             It is recommended to always set this value to a UUID.
 
-            The id must contain only letters (a-z, A-Z), numbers (0-9),
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
             underscores (_), and hyphens (-). The maximum length is 40
             characters.
         force (bool):
@@ -715,6 +778,192 @@
     )
 
 
+class CreatePrivateConnectionRequest(proto.Message):
+    r"""Request message to create a new private connection in the
+    specified project and region.
+
+    Attributes:
+        parent (str):
+            Required. The parent that owns the collection
+            of PrivateConnections.
+        private_connection_id (str):
+            Required. The private connection identifier.
+        private_connection (google.cloud.clouddms_v1.types.PrivateConnection):
+            Required. The private connection resource to
+            create.
+        request_id (str):
+            Optional. A unique ID used to identify the request. If the
+            server receives two requests with the same ID, then the
+            second request is ignored.
+
+            It is recommended to always set this value to a UUID.
+
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+        skip_validation (bool):
+            Optional. If set to true, validation of the
+            private connection is skipped.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    private_connection_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    private_connection: clouddms_resources.PrivateConnection = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=clouddms_resources.PrivateConnection,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    skip_validation: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+
+
+class ListPrivateConnectionsRequest(proto.Message):
+    r"""Request message to retrieve a list of private connections in
+    a given project and location.
+
+    Attributes:
+        parent (str):
+            Required. The parent that owns the collection
+            of private connections.
+        page_size (int):
+            Maximum number of private connections to
+            return. If unspecified, at most 50 private
+            connections are returned. The maximum value is
+            1000; values above 1000 are coerced to 1000.
+        page_token (str):
+            Page token received from a previous
+            ``ListPrivateConnections`` call. Provide this to retrieve
+            the subsequent page.
+
+            When paginating, all other parameters provided to
+            ``ListPrivateConnections`` must match the call that provided
+            the page token.
+        filter (str):
+            A filter expression that filters private connections listed
+            in the response. The expression must specify the field name,
+            a comparison operator, and the value that you want to use
+            for filtering. The value must be a string, a number, or a
+            boolean. The comparison operator must be either =, !=, >, or
+            <. For example, list private connections created this year
+            by specifying **createTime >
+            2021-01-01T00:00:00.000000000Z**.
+        order_by (str):
+            Order by fields for the result.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    order_by: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ListPrivateConnectionsResponse(proto.Message):
+    r"""Response message for 'ListPrivateConnections' request.
+
+    Attributes:
+        private_connections (MutableSequence[google.cloud.clouddms_v1.types.PrivateConnection]):
+            List of private connections.
+        next_page_token (str):
+            A token which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+        unreachable (MutableSequence[str]):
+            Locations that could not be reached.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    private_connections: MutableSequence[
+        clouddms_resources.PrivateConnection
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=clouddms_resources.PrivateConnection,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DeletePrivateConnectionRequest(proto.Message):
+    r"""Request message to delete a private connection.
+
+    Attributes:
+        name (str):
+            Required. The name of the private connection
+            to delete.
+        request_id (str):
+            Optional. A unique ID used to identify the request. If the
+            server receives two requests with the same ID, then the
+            second request is ignored.
+
+            It is recommended to always set this value to a UUID.
+
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class GetPrivateConnectionRequest(proto.Message):
+    r"""Request message to get a private connection resource.
+
+    Attributes:
+        name (str):
+            Required. The name of the private connection
+            to get.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
 class OperationMetadata(proto.Message):
     r"""Represents the metadata of the long-running operation.
 
@@ -777,4 +1026,713 @@ class OperationMetadata(proto.Message):
     )
 
 
+class ListConversionWorkspacesRequest(proto.Message):
+    r"""Retrieves a list of all conversion workspaces in a given
+    project and location.
+
+    Attributes:
+        parent (str):
+            Required. The parent which owns this
+            collection of conversion workspaces.
+        page_size (int):
+            The maximum number of conversion workspaces
+            to return. The service may return fewer than
+            this value. If unspecified, at most 50
+            conversion workspaces are returned.
+        page_token (str):
+            The nextPageToken value received in the
+            previous call to conversionWorkspaces.list, used
+            in the subsequent request to retrieve the next
+            page of results. On first call this should be
+            left blank. When paginating, all other
+            parameters provided to conversionWorkspaces.list
+            must match the call that provided the page
+            token.
+        filter (str):
+            A filter expression that filters conversion workspaces
+            listed in the response. The expression must specify the
+            field name, a comparison operator, and the value that you
+            want to use for filtering. The value must be a string, a
+            number, or a boolean. The comparison operator must be either
+            =, !=, >, or <. For example, list conversion workspaces
+            created this year by specifying **createTime >
+            2020-01-01T00:00:00.000000000Z.** You can also filter nested
+            fields. For example, you could specify **source.version =
+            "12.c.1"** to select all conversion workspaces with source
+            database version equal to 12.c.1.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=2,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class ListConversionWorkspacesResponse(proto.Message):
+    r"""Response message for 'ListConversionWorkspaces' request.
+
+    Attributes:
+        conversion_workspaces (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]):
+            The list of conversion workspace objects.
+        next_page_token (str):
+            A token which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+        unreachable (MutableSequence[str]):
+            Locations that could not be reached.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    conversion_workspaces: MutableSequence[
+        conversionworkspace_resources.ConversionWorkspace
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=conversionworkspace_resources.ConversionWorkspace,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    unreachable: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=3,
+    )
+
+
+class GetConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'GetConversionWorkspace' request.
+
+    Attributes:
+        name (str):
+            Required. Name of the conversion workspace
+            resource to get.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class CreateConversionWorkspaceRequest(proto.Message):
+    r"""Request message to create a new conversion workspace
+    in the specified project and region.
+
+    Attributes:
+        parent (str):
+            Required. The parent which owns this
+            collection of conversion workspaces.
+        conversion_workspace_id (str):
+            Required. The ID of the conversion workspace
+            to create.
+        conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace):
+            Required. Represents a conversion workspace
+            object.
+        request_id (str):
+            A unique ID used to identify the request. If the server
+            receives two requests with the same ID, then the second
+            request is ignored.
+
+            It is recommended to always set this value to a UUID.
+
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+    """
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    conversion_workspace_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    conversion_workspace: conversionworkspace_resources.ConversionWorkspace = (
+        proto.Field(
+            proto.MESSAGE,
+            number=3,
+            message=conversionworkspace_resources.ConversionWorkspace,
+        )
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class UpdateConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'UpdateConversionWorkspace' request.
+
+    Attributes:
+        update_mask (google.protobuf.field_mask_pb2.FieldMask):
+            Required. Field mask is used to specify the
+            fields to be overwritten by the update in the
+            conversion workspace resource.
+        conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspace):
+            Required. The conversion workspace parameters
+            to update.
+        request_id (str):
+            A unique ID used to identify the request. If the server
+            receives two requests with the same ID, then the second
+            request is ignored.
+
+            It is recommended to always set this value to a UUID.
+
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+    """
+
+    update_mask: field_mask_pb2.FieldMask = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message=field_mask_pb2.FieldMask,
+    )
+    conversion_workspace: conversionworkspace_resources.ConversionWorkspace = (
+        proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=conversionworkspace_resources.ConversionWorkspace,
+        )
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class DeleteConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'DeleteConversionWorkspace' request.
+
+    Attributes:
+        name (str):
+            Required. Name of the conversion workspace
+            resource to delete.
+        request_id (str):
+            A unique ID used to identify the request. If the server
+            receives two requests with the same ID, then the second
+            request is ignored.
+
+            It is recommended to always set this value to a UUID.
+
+            The ID must contain only letters (a-z, A-Z), numbers (0-9),
+            underscores (_), and hyphens (-). The maximum length is 40
+            characters.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    request_id: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class CommitConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'CommitConversionWorkspace' request.
+
+    Attributes:
+        name (str):
+            Required. Name of the conversion workspace
+            resource to commit.
+        commit_name (str):
+            Optional. The name of the commit.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    commit_name: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class RollbackConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'RollbackConversionWorkspace' request.
+
+    Attributes:
+        name (str):
+            Required. Name of the conversion workspace
+            resource to roll back to.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class ApplyConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'ApplyConversionWorkspace' request.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Required. The name of the conversion workspace resource for
+            which to apply the draft tree. Must be in the form of:
+            projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}.
+        filter (str):
+            Filter which entities to apply. Leaving this
+            field empty will apply all of the entities.
+            Supports Google AIP-160 based filtering.
+        connection_profile (str):
+            Fully qualified (URI) name of the destination
+            connection profile.
+
+            This field is a member of `oneof`_ ``destination``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    connection_profile: str = proto.Field(
+        proto.STRING,
+        number=100,
+        oneof="destination",
+    )
+
+
+class SeedConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'SeedConversionWorkspace' request.
+
+    This message has `oneof`_ fields (mutually exclusive fields).
+    For each oneof, at most one member field can be set at the same time.
+    Setting any member of the oneof automatically clears all other
+    members.
+
+    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+    Attributes:
+        name (str):
+            Name of the conversion workspace resource to seed with new
+            database structure, in the form of:
+            projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}.
+        auto_commit (bool):
+            Whether the conversion workspace should be
+            committed automatically after the seed
+            operation.
+        source_connection_profile (str):
+            Fully qualified (URI) name of the source
+            connection profile.
+
+            This field is a member of `oneof`_ ``seed_from``.
+        destination_connection_profile (str):
+            Fully qualified (URI) name of the destination
+            connection profile.
+
+            This field is a member of `oneof`_ ``seed_from``.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    auto_commit: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    source_connection_profile: str = proto.Field(
+        proto.STRING,
+        number=100,
+        oneof="seed_from",
+    )
+    destination_connection_profile: str = proto.Field(
+        proto.STRING,
+        number=101,
+        oneof="seed_from",
+    )
+
+
+class ConvertConversionWorkspaceRequest(proto.Message):
+    r"""Request message for 'ConvertConversionWorkspace' request.
+
+    Attributes:
+        name (str):
+            Name of the conversion workspace resource to convert in the
+            form of:
+            projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}.
+        auto_commit (bool):
+            Specifies whether the conversion workspace is
+            to be committed automatically after the
+            conversion.
+        filter (str):
+            Filter the entities to convert. Leaving this
+            field empty will convert all of the entities.
+            Supports Google AIP-160 style filtering.
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    auto_commit: bool = proto.Field(
+        proto.BOOL,
+        number=4,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class ImportMappingRulesRequest(proto.Message):
+    r"""Request message for 'ImportMappingRules' request.
+
+    Attributes:
+        parent (str):
+            Required. Name of the conversion workspace resource to
+            import the rules to in the form of:
+            projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}.
+        rules_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat):
+            The format of the rules content file.
+        rules_files (MutableSequence[google.cloud.clouddms_v1.types.ImportMappingRulesRequest.RulesFile]):
+            One or more rules files.
+        auto_commit (bool):
+            Whether the conversion workspace should be
+            committed automatically after the import
+            operation.
+    """
+
+    class RulesFile(proto.Message):
+        r"""Details of a single rules file.
+
+        Attributes:
+            rules_source_filename (str):
+                The filename of the rules that need to be
+                converted. The filename is used mainly so that
+                future logs of the import rules job contain it,
+                and can therefore be searched by it.
+            rules_content (str):
+                The text content of the rules that need to
+                be converted.
+        """
+
+        rules_source_filename: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        rules_content: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+
+    parent: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    rules_format: conversionworkspace_resources.ImportRulesFileFormat = proto.Field(
+        proto.ENUM,
+        number=2,
+        enum=conversionworkspace_resources.ImportRulesFileFormat,
+    )
+    rules_files: MutableSequence[RulesFile] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message=RulesFile,
+    )
+    auto_commit: bool = proto.Field(
+        proto.BOOL,
+        number=6,
+    )
+
+
+class DescribeDatabaseEntitiesRequest(proto.Message):
+    r"""Request message for 'DescribeDatabaseEntities' request.
+
+    Attributes:
+        conversion_workspace (str):
+            Required. Name of the conversion workspace resource whose
+            database entities are described. Must be in the form of:
+            projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}.
+        page_size (int):
+            The maximum number of entities to return. The
+            service may return fewer entities than the value
+            specifies.
+        page_token (str):
+            The nextPageToken value received in the
+            previous call to
+            conversionWorkspace.describeDatabaseEntities,
+            used in the subsequent request to retrieve the
+            next page of results. On first call this should
+            be left blank. When paginating, all other
+            parameters provided to
+            conversionWorkspace.describeDatabaseEntities
+            must match the call that provided the page
+            token.
+        tree (google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest.DBTreeType):
+            The tree to fetch.
+        uncommitted (bool):
+            Whether to retrieve the latest committed version of the
+            entities or the latest version. This field is ignored if a
+            specific commit_id is specified.
+        commit_id (str):
+            Request a specific commit ID. If not
+            specified, the entities from the latest commit
+            are returned.
+        filter (str):
+            Filter the returned entities based on AIP-160
+            standard.
+    """
+
+    class DBTreeType(proto.Enum):
+        r"""The type of tree to return.
+
+        Values:
+            DB_TREE_TYPE_UNSPECIFIED (0):
+                Unspecified tree type.
+            SOURCE_TREE (1):
+                The source database tree.
+            DRAFT_TREE (2):
+                The draft database tree.
+            DESTINATION_TREE (3):
+                The destination database tree.
+        """
+        DB_TREE_TYPE_UNSPECIFIED = 0
+        SOURCE_TREE = 1
+        DRAFT_TREE = 2
+        DESTINATION_TREE = 3
+
+    conversion_workspace: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    page_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    page_token: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    tree: DBTreeType = proto.Field(
+        proto.ENUM,
+        number=6,
+        enum=DBTreeType,
+    )
+    uncommitted: bool = proto.Field(
+        proto.BOOL,
+        number=11,
+    )
+    commit_id: str = proto.Field(
+        proto.STRING,
+        number=12,
+    )
+    filter: str = proto.Field(
+        proto.STRING,
+        number=13,
+    )
+
+
+class DescribeDatabaseEntitiesResponse(proto.Message):
+    r"""Response message for 'DescribeDatabaseEntities' request.
+
+    Attributes:
+        database_entities (MutableSequence[google.cloud.clouddms_v1.types.DatabaseEntity]):
+            The list of database entities for the
+            conversion workspace.
+        next_page_token (str):
+            A token which can be sent as ``page_token`` to retrieve the
+            next page. If this field is omitted, there are no subsequent
+            pages.
+    """
+
+    @property
+    def raw_page(self):
+        return self
+
+    database_entities: MutableSequence[
+        conversionworkspace_resources.DatabaseEntity
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=conversionworkspace_resources.DatabaseEntity,
+    )
+    next_page_token: str = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+
+
+class SearchBackgroundJobsRequest(proto.Message):
+    r"""Request message for 'SearchBackgroundJobs' request.
+
+    Attributes:
+        conversion_workspace (str):
+            Required. Name of the conversion workspace resource whose
+            jobs are listed, in the form of:
+            projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}.
+        return_most_recent_per_job_type (bool):
+            Optional. Whether to return only the most
+            recent job per job type.
+        max_size (int):
+            Optional. The maximum number of jobs to
+            return. The service may return fewer than this
+            value. If unspecified, at most 100 jobs are
+            returned. The maximum value is 100; values above
+            100 are coerced to 100.
+        completed_until_time (google.protobuf.timestamp_pb2.Timestamp):
+            Optional. If provided, only returns jobs that
+            completed before the given timestamp
+            (exclusive).
+    """
+
+    conversion_workspace: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    return_most_recent_per_job_type: bool = proto.Field(
+        proto.BOOL,
+        number=2,
+    )
+    max_size: int = proto.Field(
+        proto.INT32,
+        number=3,
+    )
+    completed_until_time: timestamp_pb2.Timestamp = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class SearchBackgroundJobsResponse(proto.Message):
+    r"""Response message for 'SearchBackgroundJobs' request.
+
+    Attributes:
+        jobs (MutableSequence[google.cloud.clouddms_v1.types.BackgroundJobLogEntry]):
+            The list of background jobs for the
+            conversion workspace.
+    """
+
+    jobs: MutableSequence[
+        conversionworkspace_resources.BackgroundJobLogEntry
+    ] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=conversionworkspace_resources.BackgroundJobLogEntry,
+    )
+
+
+class DescribeConversionWorkspaceRevisionsRequest(proto.Message):
+    r"""Request message for 'DescribeConversionWorkspaceRevisions'
+    request.
+
+    Attributes:
+        conversion_workspace (str):
+            Required. Name of the conversion workspace resource whose
+            revisions are listed. Must be in the form of:
+            projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}.
+        commit_id (str):
+            Optional. A filter to request a specific
+            commit ID.
+ """ + + conversion_workspace: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DescribeConversionWorkspaceRevisionsResponse(proto.Message): + r"""Response message for 'DescribeConversionWorkspaceRevisions' + request. + + Attributes: + revisions (MutableSequence[google.cloud.clouddms_v1.types.ConversionWorkspace]): + The list of conversion workspace revisions. + """ + + revisions: MutableSequence[ + conversionworkspace_resources.ConversionWorkspace + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=conversionworkspace_resources.ConversionWorkspace, + ) + + +class FetchStaticIpsRequest(proto.Message): + r"""Request message for 'FetchStaticIps' request. + + Attributes: + name (str): + Required. The resource name for the location for which + static IPs should be returned. Must be in the format + ``projects/*/locations/*``. + page_size (int): + Maximum number of IPs to return. + page_token (str): + A page token, received from a previous ``FetchStaticIps`` + call. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class FetchStaticIpsResponse(proto.Message): + r"""Response message for a 'FetchStaticIps' request. + + Attributes: + static_ips (MutableSequence[str]): + List of static IPs. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + static_ips: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/clouddms_v1/types/clouddms_resources.py b/google/cloud/clouddms_v1/types/clouddms_resources.py index bfea88a..1e51da9 100644 --- a/google/cloud/clouddms_v1/types/clouddms_resources.py +++ b/google/cloud/clouddms_v1/types/clouddms_resources.py @@ -26,26 +26,55 @@ __protobuf__ = proto.module( package="google.cloud.clouddms.v1", manifest={ + "NetworkArchitecture", "DatabaseEngine", "DatabaseProvider", "SslConfig", "MySqlConnectionProfile", "PostgreSqlConnectionProfile", + "OracleConnectionProfile", "CloudSqlConnectionProfile", + "AlloyDbConnectionProfile", "SqlAclEntry", "SqlIpConfig", "CloudSqlSettings", + "AlloyDbSettings", "StaticIpConnectivity", + "PrivateServiceConnectConnectivity", "ReverseSshConnectivity", "VpcPeeringConnectivity", + "ForwardSshTunnelConnectivity", + "StaticServiceIpConnectivity", + "PrivateConnectivity", "DatabaseType", "MigrationJob", + "ConversionWorkspaceInfo", "ConnectionProfile", "MigrationJobVerificationError", + "PrivateConnection", + "VpcPeeringConfig", }, ) +class NetworkArchitecture(proto.Enum): + r""" + + Values: + NETWORK_ARCHITECTURE_UNSPECIFIED (0): + No description available. + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER (1): + Instance is in Cloud SQL's old producer + network architecture. + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER (2): + Instance is in Cloud SQL's new producer + network architecture. + """ + NETWORK_ARCHITECTURE_UNSPECIFIED = 0 + NETWORK_ARCHITECTURE_OLD_CSQL_PRODUCER = 1 + NETWORK_ARCHITECTURE_NEW_CSQL_PRODUCER = 2 + + class DatabaseEngine(proto.Enum): r"""The database engine types. @@ -57,10 +86,13 @@ class DatabaseEngine(proto.Enum): The source engine is MySQL. 
POSTGRESQL (2): The source engine is PostgreSQL. + ORACLE (4): + The source engine is Oracle. """ DATABASE_ENGINE_UNSPECIFIED = 0 MYSQL = 1 POSTGRESQL = 2 + ORACLE = 4 class DatabaseProvider(proto.Enum): @@ -73,10 +105,16 @@ class DatabaseProvider(proto.Enum): CloudSQL runs the database. RDS (2): RDS runs the database. + AURORA (3): + Amazon Aurora. + ALLOYDB (4): + AlloyDB. """ DATABASE_PROVIDER_UNSPECIFIED = 0 CLOUDSQL = 1 RDS = 2 + AURORA = 3 + ALLOYDB = 4 class SslConfig(proto.Message): @@ -209,6 +247,13 @@ class PostgreSqlConnectionProfile(proto.Message): r"""Specifies connection parameters required specifically for PostgreSQL databases. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: host (str): Required. The IP or hostname of the source @@ -238,6 +283,19 @@ class PostgreSqlConnectionProfile(proto.Message): If the source is a Cloud SQL database, use this field to provide the Cloud SQL instance ID of the source. + network_architecture (google.cloud.clouddms_v1.types.NetworkArchitecture): + Output only. If the source is a Cloud SQL + database, this field indicates the network + architecture it's associated with. + static_ip_connectivity (google.cloud.clouddms_v1.types.StaticIpConnectivity): + Static ip connectivity data (default, no + additional details needed). + + This field is a member of `oneof`_ ``connectivity``. + private_service_connect_connectivity (google.cloud.clouddms_v1.types.PrivateServiceConnectConnectivity): + Private service connect connectivity. + + This field is a member of `oneof`_ ``connectivity``. """ host: str = proto.Field( @@ -269,6 +327,119 @@ class PostgreSqlConnectionProfile(proto.Message): proto.STRING, number=7, ) + network_architecture: "NetworkArchitecture" = proto.Field( + proto.ENUM, + number=8, + enum="NetworkArchitecture", + ) + static_ip_connectivity: "StaticIpConnectivity" = proto.Field( + proto.MESSAGE, + number=100, + oneof="connectivity", + message="StaticIpConnectivity", + ) + private_service_connect_connectivity: "PrivateServiceConnectConnectivity" = ( + proto.Field( + proto.MESSAGE, + number=101, + oneof="connectivity", + message="PrivateServiceConnectConnectivity", + ) + ) + + +class OracleConnectionProfile(proto.Message): + r"""Specifies connection parameters required specifically for + Oracle databases. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + host (str): + Required. The IP or hostname of the source + Oracle database. + port (int): + Required. The network port of the source + Oracle database. + username (str): + Required. The username that Database + Migration Service will use to connect to the + database. The value is encrypted when stored in + Database Migration Service. + password (str): + Required. Input only. The password for the + user that Database Migration Service will be + using to connect to the database. This field is + not returned on request, and the value is + encrypted when stored in Database Migration + Service. 
+ password_set (bool): + Output only. Indicates whether a new password + is included in the request. + database_service (str): + Required. Database service for the Oracle + connection. + static_service_ip_connectivity (google.cloud.clouddms_v1.types.StaticServiceIpConnectivity): + Static Service IP connectivity. + + This field is a member of `oneof`_ ``connectivity``. + forward_ssh_connectivity (google.cloud.clouddms_v1.types.ForwardSshTunnelConnectivity): + Forward SSH tunnel connectivity. + + This field is a member of `oneof`_ ``connectivity``. + private_connectivity (google.cloud.clouddms_v1.types.PrivateConnectivity): + Private connectivity. + + This field is a member of `oneof`_ ``connectivity``. + """ + + host: str = proto.Field( + proto.STRING, + number=1, + ) + port: int = proto.Field( + proto.INT32, + number=2, + ) + username: str = proto.Field( + proto.STRING, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=4, + ) + password_set: bool = proto.Field( + proto.BOOL, + number=5, + ) + database_service: str = proto.Field( + proto.STRING, + number=6, + ) + static_service_ip_connectivity: "StaticServiceIpConnectivity" = proto.Field( + proto.MESSAGE, + number=100, + oneof="connectivity", + message="StaticServiceIpConnectivity", + ) + forward_ssh_connectivity: "ForwardSshTunnelConnectivity" = proto.Field( + proto.MESSAGE, + number=101, + oneof="connectivity", + message="ForwardSshTunnelConnectivity", + ) + private_connectivity: "PrivateConnectivity" = proto.Field( + proto.MESSAGE, + number=102, + oneof="connectivity", + message="PrivateConnectivity", + ) class CloudSqlConnectionProfile(proto.Message): @@ -289,6 +460,12 @@ class CloudSqlConnectionProfile(proto.Message): public_ip (str): Output only. The Cloud SQL database instance's public IP. + additional_public_ip (str): + Output only. The Cloud SQL database + instance's additional (outgoing) public IP. Used + when the Cloud SQL database availability type is + REGIONAL (i.e. multiple zones / highly + available). """ cloud_sql_id: str = proto.Field( @@ -308,6 +485,34 @@ class CloudSqlConnectionProfile(proto.Message): proto.STRING, number=4, ) + additional_public_ip: str = proto.Field( + proto.STRING, + number=5, + ) + + +class AlloyDbConnectionProfile(proto.Message): + r"""Specifies required connection parameters, and the parameters + required to create an AlloyDB destination cluster. + + Attributes: + cluster_id (str): + Required. The AlloyDB cluster ID that this + connection profile is associated with. + settings (google.cloud.clouddms_v1.types.AlloyDbSettings): + Immutable. Metadata used to create the + destination AlloyDB cluster. + """ + + cluster_id: str = proto.Field( + proto.STRING, + number=1, + ) + settings: "AlloyDbSettings" = proto.Field( + proto.MESSAGE, + number=2, + message="AlloyDbSettings", + ) class SqlAclEntry(proto.Message): @@ -373,6 +578,17 @@ class SqlIpConfig(proto.Message): SQL instance is accessible for private IP. For example, ``projects/myProject/global/networks/default``. This setting can be updated, but it cannot be removed after it is set. + allocated_ip_range (str): + Optional. The name of the allocated IP + address range for the private IP Cloud SQL + instance. This name refers to an already + allocated IP range address. If set, the instance + IP address will be created in the allocated + range. Note that this IP address range can't be + modified after the instance is created. 
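The three connectivity fields above are members of a single ``connectivity`` oneof, so assigning one clears the others, which is the proto-plus behavior the docstring describes. A hedged sketch of building an Oracle source profile; the host, credentials, and tunnel details are placeholders:

from google.cloud import clouddms_v1

profile = clouddms_v1.OracleConnectionProfile(
    host="10.0.0.5",  # Placeholder source host.
    port=1521,
    username="migration_user",
    password="secret",  # Input only; never returned by the API.
    database_service="ORCLPDB1",
    static_service_ip_connectivity=clouddms_v1.StaticServiceIpConnectivity(),
)

# Setting another member of the ``connectivity`` oneof clears the first.
profile.forward_ssh_connectivity = clouddms_v1.ForwardSshTunnelConnectivity(
    hostname="bastion.example.internal",
    username="tunnel_user",
    port=22,
    private_key="-----BEGIN PRIVATE KEY-----\n...",
)
assert "static_service_ip_connectivity" not in profile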
If you
+            change the VPC when configuring connectivity
+            settings for the migration job, this field is
+            not relevant.
         require_ssl (google.protobuf.wrappers_pb2.BoolValue):
             Whether SSL connections over IP should be
             enforced or not.
@@ -392,6 +608,10 @@
         proto.STRING,
         number=2,
     )
+    allocated_ip_range: str = proto.Field(
+        proto.STRING,
+        number=5,
+    )
     require_ssl: wrappers_pb2.BoolValue = proto.Field(
         proto.MESSAGE,
         number=3,
@@ -465,7 +685,13 @@
             is 10GB.
         zone (str):
             The Google Cloud Platform zone where your
-            Cloud SQL datdabse instance is located.
+            Cloud SQL database instance is located.
+        secondary_zone (str):
+            Optional. The Google Cloud Platform zone
+            where the failover Cloud SQL database instance
+            is located. Used when the Cloud SQL database
+            availability type is REGIONAL (i.e. multiple
+            zones / highly available).
         source_id (str):
             The Database Migration Service source
             connection profile ID, in the format:
@@ -478,6 +704,15 @@
         collation (str):
             The Cloud SQL default instance level
             collation.
+        cmek_key_name (str):
+            The KMS key name used for the Cloud SQL instance.
+        availability_type (google.cloud.clouddms_v1.types.CloudSqlSettings.SqlAvailabilityType):
+            Optional. Availability type. Potential values:
+
+            -  ``ZONAL``: The instance serves data from only one zone.
+               Outages in that zone affect data availability.
+            -  ``REGIONAL``: The instance can serve data from more than
+               one zone in a region (it is highly available).
     """

     class SqlActivationPolicy(proto.Enum):
@@ -532,6 +767,8 @@
                 PostgreSQL 12.
             POSTGRES_13 (8):
                 PostgreSQL 13.
+            POSTGRES_14 (17):
+                PostgreSQL 14.
         """
         SQL_DATABASE_VERSION_UNSPECIFIED = 0
         MYSQL_5_6 = 1
@@ -542,6 +779,22 @@
         MYSQL_8_0 = 6
         POSTGRES_12 = 7
         POSTGRES_13 = 8
+        POSTGRES_14 = 17
+
+    class SqlAvailabilityType(proto.Enum):
+        r"""The availability type of the given Cloud SQL instance.
+
+        Values:
+            SQL_AVAILABILITY_TYPE_UNSPECIFIED (0):
+                This is an unknown availability type.
+            ZONAL (1):
+                Zonal availability instance.
+            REGIONAL (2):
+                Regional availability instance.
+        """
+        SQL_AVAILABILITY_TYPE_UNSPECIFIED = 0
+        ZONAL = 1
+        REGIONAL = 2

     database_version: SqlDatabaseVersion = proto.Field(
         proto.ENUM,
@@ -596,6 +849,10 @@
         proto.STRING,
         number=11,
     )
+    secondary_zone: str = proto.Field(
+        proto.STRING,
+        number=18,
+    )
     source_id: str = proto.Field(
         proto.STRING,
         number=12,
@@ -612,16 +869,206 @@
         proto.STRING,
         number=15,
     )
+    cmek_key_name: str = proto.Field(
+        proto.STRING,
+        number=16,
+    )
+    availability_type: SqlAvailabilityType = proto.Field(
+        proto.ENUM,
+        number=17,
+        enum=SqlAvailabilityType,
+    )
+
+
+class AlloyDbSettings(proto.Message):
+    r"""Settings for creating an AlloyDB cluster.
+
+    Attributes:
+        initial_user (google.cloud.clouddms_v1.types.AlloyDbSettings.UserPassword):
+            Required. Input only. The initial user to set
+            up during cluster creation.
+        vpc_network (str):
+            Required. The resource link for the VPC network in which
+            cluster resources are created and from which they are
+            accessible via Private IP. The network must belong to the
+            same project as the cluster. It is specified in the form:
+            "projects/{project_number}/global/networks/{network_id}".
+            This is required to create a cluster.
+        labels (MutableMapping[str, str]):
+            Labels for the AlloyDB cluster created by
+            DMS.
An object containing a list of 'key',
+            'value' pairs.
+        primary_instance_settings (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings):
+            Settings for the cluster's primary instance.
+        encryption_config (google.cloud.clouddms_v1.types.AlloyDbSettings.EncryptionConfig):
+            Optional. The encryption config can be
+            specified to encrypt the data disks and other
+            persistent data resources of a cluster with a
+            customer-managed encryption key (CMEK). When
+            this field is not specified, the cluster will
+            use the default encryption scheme to protect
+            the user data.
+    """
+
+    class UserPassword(proto.Message):
+        r"""The username/password for a database user. Used for
+        specifying initial users at cluster creation time.
+
+        Attributes:
+            user (str):
+                The database username.
+            password (str):
+                The initial password for the user.
+            password_set (bool):
+                Output only. Indicates if the initial_user.password field
+                has been set.
+        """
+
+        user: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        password: str = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        password_set: bool = proto.Field(
+            proto.BOOL,
+            number=3,
+        )
+
+    class PrimaryInstanceSettings(proto.Message):
+        r"""Settings for the cluster's primary instance.
+
+        Attributes:
+            id (str):
+                Required. The ID of the AlloyDB primary instance. The ID
+                must satisfy the regex expression "[a-z0-9-]+".
+            machine_config (google.cloud.clouddms_v1.types.AlloyDbSettings.PrimaryInstanceSettings.MachineConfig):
+                Configuration for the machines that host the
+                underlying database engine.
+            database_flags (MutableMapping[str, str]):
+                Database flags to pass to AlloyDB when DMS is
+                creating the AlloyDB cluster and instances. See
+                the AlloyDB documentation for how these can be
+                used.
+            labels (MutableMapping[str, str]):
+                Labels for the AlloyDB primary instance
+                created by DMS. An object containing a list of
+                'key', 'value' pairs.
+            private_ip (str):
+                Output only. The private IP address for the
+                instance. This is the connection endpoint for an
+                end-user application.
+        """
+
+        class MachineConfig(proto.Message):
+            r"""MachineConfig describes the configuration of a machine.
+
+            Attributes:
+                cpu_count (int):
+                    The number of CPUs in the VM instance.
+            """
+
+            cpu_count: int = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+
+        id: str = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        machine_config: "AlloyDbSettings.PrimaryInstanceSettings.MachineConfig" = (
+            proto.Field(
+                proto.MESSAGE,
+                number=2,
+                message="AlloyDbSettings.PrimaryInstanceSettings.MachineConfig",
+            )
+        )
+        database_flags: MutableMapping[str, str] = proto.MapField(
+            proto.STRING,
+            proto.STRING,
+            number=6,
+        )
+        labels: MutableMapping[str, str] = proto.MapField(
+            proto.STRING,
+            proto.STRING,
+            number=7,
+        )
+        private_ip: str = proto.Field(
+            proto.STRING,
+            number=8,
+        )
+
+    class EncryptionConfig(proto.Message):
+        r"""EncryptionConfig describes the encryption config of a cluster
+        that is encrypted with a CMEK (customer-managed encryption key).
+
+        Attributes:
+            kms_key_name (str):
+                The fully-qualified resource name of the KMS key.
Each Cloud + KMS key is regionalized and has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=1, + ) + + initial_user: UserPassword = proto.Field( + proto.MESSAGE, + number=1, + message=UserPassword, + ) + vpc_network: str = proto.Field( + proto.STRING, + number=2, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + primary_instance_settings: PrimaryInstanceSettings = proto.Field( + proto.MESSAGE, + number=4, + message=PrimaryInstanceSettings, + ) + encryption_config: EncryptionConfig = proto.Field( + proto.MESSAGE, + number=5, + message=EncryptionConfig, + ) class StaticIpConnectivity(proto.Message): r"""The source database will allow incoming connections from the - destination database's public IP. You can retrieve the Cloud SQL - instance's public IP from the Cloud SQL console or using Cloud - SQL APIs. No additional configuration is required. + public IP of the destination database. You can retrieve the + public IP of the Cloud SQL instance from the Cloud SQL console + or using Cloud SQL APIs. No additional configuration is + required. + + """ + +class PrivateServiceConnectConnectivity(proto.Message): + r"""Private Service Connect connectivity + (https://cloud.google.com/vpc/docs/private-service-connect#service-attachments) + + Attributes: + service_attachment (str): + Required. A service attachment that exposes a database, and + has the following format: + projects/{project}/regions/{region}/serviceAttachments/{service_attachment_name} """ + service_attachment: str = proto.Field( + proto.STRING, + number=1, + ) + class ReverseSshConnectivity(proto.Message): r"""The details needed to configure a reverse SSH tunnel between @@ -685,6 +1132,76 @@ class VpcPeeringConnectivity(proto.Message): ) +class ForwardSshTunnelConnectivity(proto.Message): + r"""Forward SSH Tunnel connectivity. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + hostname (str): + Required. Hostname for the SSH tunnel. + username (str): + Required. Username for the SSH tunnel. + port (int): + Port for the SSH tunnel, default value is 22. + password (str): + Input only. SSH password. + + This field is a member of `oneof`_ ``authentication_method``. + private_key (str): + Input only. SSH private key. + + This field is a member of `oneof`_ ``authentication_method``. + """ + + hostname: str = proto.Field( + proto.STRING, + number=1, + ) + username: str = proto.Field( + proto.STRING, + number=2, + ) + port: int = proto.Field( + proto.INT32, + number=3, + ) + password: str = proto.Field( + proto.STRING, + number=100, + oneof="authentication_method", + ) + private_key: str = proto.Field( + proto.STRING, + number=101, + oneof="authentication_method", + ) + + +class StaticServiceIpConnectivity(proto.Message): + r"""Static IP address connectivity configured on service project.""" + + +class PrivateConnectivity(proto.Message): + r"""Private Connectivity. + + Attributes: + private_connection (str): + Required. The resource name (URI) of the + private connection. 
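To see how the AlloyDB pieces compose, here is a sketch of a destination connection profile that nests AlloyDbSettings with the required initial user, a primary instance, and an optional CMEK key. Every ID and resource name below is a placeholder:

from google.cloud import clouddms_v1

alloydb_profile = clouddms_v1.AlloyDbConnectionProfile(
    cluster_id="my-alloydb-cluster",
    settings=clouddms_v1.AlloyDbSettings(
        initial_user=clouddms_v1.AlloyDbSettings.UserPassword(
            user="postgres",
            password="initial-secret",  # Input only, like the profile passwords above.
        ),
        vpc_network="projects/12345/global/networks/default",
        primary_instance_settings=clouddms_v1.AlloyDbSettings.PrimaryInstanceSettings(
            id="my-primary-instance",
            machine_config=clouddms_v1.AlloyDbSettings.PrimaryInstanceSettings.MachineConfig(
                cpu_count=4,
            ),
        ),
        # Optional CMEK; omit to use the default encryption scheme.
        encryption_config=clouddms_v1.AlloyDbSettings.EncryptionConfig(
            kms_key_name="projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
        ),
    ),
)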
+ """ + + private_connection: str = proto.Field( + proto.STRING, + number=1, + ) + + class DatabaseType(proto.Message): r"""A message defining the database engine and provider. @@ -721,7 +1238,7 @@ class MigrationJob(proto.Message): name (str): The name (URI) of this migration job resource, in the form of: - projects/{project}/locations/{location}/instances/{instance}. + projects/{project}/locations/{location}/migrationJobs/{migrationJob}. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The timestamp when the migration job resource was created. A timestamp in RFC3339 @@ -750,7 +1267,11 @@ class MigrationJob(proto.Message): Required. The migration job type. dump_path (str): The path to the dump file in Google Cloud Storage, in the - format: (gs://[BUCKET_NAME]/[OBJECT_NAME]). + format: (gs://[BUCKET_NAME]/[OBJECT_NAME]). This field and + the "dump_flags" field are mutually exclusive. + dump_flags (google.cloud.clouddms_v1.types.MigrationJob.DumpFlags): + The initial dump flags. This field and the "dump_path" field + are mutually exclusive. source (str): Required. The resource name (URI) of the source connection profile. @@ -789,6 +1310,30 @@ class MigrationJob(proto.Message): end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. If the migration job is completed, the time when it was completed. + conversion_workspace (google.cloud.clouddms_v1.types.ConversionWorkspaceInfo): + The conversion workspace used by the + migration. + filter (str): + This field can be used to select the entities + to migrate as part of the migration job. It uses + AIP-160 notation to select a subset of the + entities configured on the associated + conversion-workspace. This field should not be + set on migration-jobs that are not associated + with a conversion workspace. + cmek_key_name (str): + The CMEK (customer-managed encryption key) fully qualified + key name used for the migration job. This field supports all + migration jobs types except for: + + - Mysql to Mysql (use the cmek field in the cloudsql + connection profile instead). + - PostrgeSQL to PostgreSQL (use the cmek field in the + cloudsql connection profile instead). + - PostgreSQL to AlloyDB (use the kms_key_name field in the + alloydb connection profile instead). Each Cloud CMEK key + has the following format: + projects/[PROJECT]/locations/[REGION]/keyRings/[RING]/cryptoKeys/[KEY_NAME] """ class State(proto.Enum): @@ -888,6 +1433,39 @@ class Type(proto.Enum): ONE_TIME = 1 CONTINUOUS = 2 + class DumpFlag(proto.Message): + r"""Dump flag definition. + + Attributes: + name (str): + The name of the flag + value (str): + The value of the flag. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + value: str = proto.Field( + proto.STRING, + number=2, + ) + + class DumpFlags(proto.Message): + r"""Dump flags definition. + + Attributes: + dump_flags (MutableSequence[google.cloud.clouddms_v1.types.MigrationJob.DumpFlag]): + The flags for the initial dump. 
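Since ``dump_path`` and ``dump_flags`` are documented above as mutually exclusive, a migration job opts into exactly one dump strategy. A sketch of the flag-based variant; the profile names are placeholders and the flag shown is illustrative rather than a verified dump option:

from google.cloud import clouddms_v1

job = clouddms_v1.MigrationJob(
    display_name="oracle-to-pg",
    type_=clouddms_v1.MigrationJob.Type.CONTINUOUS,
    source="projects/my-project/locations/us-central1/connectionProfiles/oracle-src",
    destination="projects/my-project/locations/us-central1/connectionProfiles/pg-dest",
    # Instead of pointing dump_path at a pre-made gs:// object, pass flags
    # for the dump that Database Migration Service generates itself.
    dump_flags=clouddms_v1.MigrationJob.DumpFlags(
        dump_flags=[
            clouddms_v1.MigrationJob.DumpFlag(name="no-owner", value="true"),
        ]
    ),
)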
+ """ + + dump_flags: MutableSequence["MigrationJob.DumpFlag"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="MigrationJob.DumpFlag", + ) + name: str = proto.Field( proto.STRING, number=1, @@ -930,6 +1508,11 @@ class Type(proto.Enum): proto.STRING, number=9, ) + dump_flags: DumpFlags = proto.Field( + proto.MESSAGE, + number=17, + message=DumpFlags, + ) source: str = proto.Field( proto.STRING, number=10, @@ -981,6 +1564,40 @@ class Type(proto.Enum): number=16, message=timestamp_pb2.Timestamp, ) + conversion_workspace: "ConversionWorkspaceInfo" = proto.Field( + proto.MESSAGE, + number=18, + message="ConversionWorkspaceInfo", + ) + filter: str = proto.Field( + proto.STRING, + number=20, + ) + cmek_key_name: str = proto.Field( + proto.STRING, + number=21, + ) + + +class ConversionWorkspaceInfo(proto.Message): + r"""A conversion workspace's version. + + Attributes: + name (str): + The resource name (URI) of the conversion + workspace. + commit_id (str): + The commit ID of the conversion workspace. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + commit_id: str = proto.Field( + proto.STRING, + number=2, + ) class ConnectionProfile(proto.Message): @@ -997,7 +1614,7 @@ class ConnectionProfile(proto.Message): name (str): The name of this connection profile resource in the form of - projects/{project}/locations/{location}/instances/{instance}. + projects/{project}/locations/{location}/connectionProfiles/{connectionProfile}. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The timestamp when the resource was created. A timestamp in RFC3339 UTC "Zulu" @@ -1028,10 +1645,18 @@ class ConnectionProfile(proto.Message): postgresql (google.cloud.clouddms_v1.types.PostgreSqlConnectionProfile): A PostgreSQL database connection profile. + This field is a member of `oneof`_ ``connection_profile``. + oracle (google.cloud.clouddms_v1.types.OracleConnectionProfile): + An Oracle database connection profile. + This field is a member of `oneof`_ ``connection_profile``. cloudsql (google.cloud.clouddms_v1.types.CloudSqlConnectionProfile): A CloudSQL database connection profile. + This field is a member of `oneof`_ ``connection_profile``. + alloydb (google.cloud.clouddms_v1.types.AlloyDbConnectionProfile): + An AlloyDB cluster connection profile. + This field is a member of `oneof`_ ``connection_profile``. error (google.rpc.status_pb2.Status): Output only. The error details in case of @@ -1114,12 +1739,24 @@ class State(proto.Enum): oneof="connection_profile", message="PostgreSqlConnectionProfile", ) + oracle: "OracleConnectionProfile" = proto.Field( + proto.MESSAGE, + number=104, + oneof="connection_profile", + message="OracleConnectionProfile", + ) cloudsql: "CloudSqlConnectionProfile" = proto.Field( proto.MESSAGE, number=102, oneof="connection_profile", message="CloudSqlConnectionProfile", ) + alloydb: "AlloyDbConnectionProfile" = proto.Field( + proto.MESSAGE, + number=105, + oneof="connection_profile", + message="AlloyDbConnectionProfile", + ) error: status_pb2.Status = proto.Field( proto.MESSAGE, number=7, @@ -1205,6 +1842,20 @@ class ErrorCode(proto.Enum): CANT_RESTART_RUNNING_MIGRATION (21): Migration is already running at the time of restart request. + TABLES_WITH_LIMITED_SUPPORT (24): + The source has tables with limited support. + E.g. PostgreSQL tables without primary keys. + UNSUPPORTED_DATABASE_LOCALE (25): + The source uses an unsupported locale. 
+ UNSUPPORTED_DATABASE_FDW_CONFIG (26): + The source uses an unsupported Foreign Data + Wrapper configuration. + ERROR_RDBMS (27): + There was an underlying RDBMS error. + SOURCE_SIZE_EXCEEDS_THRESHOLD (28): + The source DB size in Bytes exceeds a certain + threshold. The migration might require an + increase of quota, or might not be supported. """ ERROR_CODE_UNSPECIFIED = 0 CONNECTION_FAILURE = 1 @@ -1226,6 +1877,11 @@ class ErrorCode(proto.Enum): UNSUPPORTED_TABLE_DEFINITION = 18 UNSUPPORTED_DEFINER = 19 CANT_RESTART_RUNNING_MIGRATION = 21 + TABLES_WITH_LIMITED_SUPPORT = 24 + UNSUPPORTED_DATABASE_LOCALE = 25 + UNSUPPORTED_DATABASE_FDW_CONFIG = 26 + ERROR_RDBMS = 27 + SOURCE_SIZE_EXCEEDS_THRESHOLD = 28 error_code: ErrorCode = proto.Field( proto.ENUM, @@ -1242,4 +1898,135 @@ class ErrorCode(proto.Enum): ) +class PrivateConnection(proto.Message): + r"""The PrivateConnection resource is used to establish private + connectivity with the customer's network. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The name of the resource. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The create time of the resource. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The last update time of the + resource. + labels (MutableMapping[str, str]): + The resource labels for private connections to use to + annotate any related underlying resources such as Compute + Engine VMs. An object containing a list of "key": "value" + pairs. + + Example: + ``{ "name": "wrench", "mass": "1.3kg", "count": "3" }``. + display_name (str): + The private connection display name. + state (google.cloud.clouddms_v1.types.PrivateConnection.State): + Output only. The state of the private + connection. + error (google.rpc.status_pb2.Status): + Output only. The error details in case of + state FAILED. + vpc_peering_config (google.cloud.clouddms_v1.types.VpcPeeringConfig): + VPC peering configuration. + + This field is a member of `oneof`_ ``connectivity``. + """ + + class State(proto.Enum): + r"""Private Connection state. + + Values: + STATE_UNSPECIFIED (0): + No description available. + CREATING (1): + The private connection is in creation state - + creating resources. + CREATED (2): + The private connection has been created with + all of its resources. + FAILED (3): + The private connection creation has failed. + DELETING (4): + The private connection is being deleted. + FAILED_TO_DELETE (5): + Delete request has failed, resource is in + invalid state. + DELETED (6): + The private connection has been deleted. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + CREATED = 2 + FAILED = 3 + DELETING = 4 + FAILED_TO_DELETE = 5 + DELETED = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + display_name: str = proto.Field( + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=7, + message=status_pb2.Status, + ) + vpc_peering_config: "VpcPeeringConfig" = proto.Field( + proto.MESSAGE, + number=100, + oneof="connectivity", + message="VpcPeeringConfig", + ) + + +class VpcPeeringConfig(proto.Message): + r"""The VPC peering configuration is used to create VPC peering + with the consumer's VPC. + + Attributes: + vpc_name (str): + Required. Fully qualified name of the VPC + that Database Migration Service will peer to. + subnet (str): + Required. A free subnet for peering. (CIDR of + /29) + """ + + vpc_name: str = proto.Field( + proto.STRING, + number=1, + ) + subnet: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/clouddms_v1/types/conversionworkspace_resources.py b/google/cloud/clouddms_v1/types/conversionworkspace_resources.py new file mode 100644 index 0000000..581f67c --- /dev/null +++ b/google/cloud/clouddms_v1/types/conversionworkspace_resources.py @@ -0,0 +1,1222 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import struct_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.clouddms_v1.types import clouddms_resources + +__protobuf__ = proto.module( + package="google.cloud.clouddms.v1", + manifest={ + "DatabaseEntityType", + "BackgroundJobType", + "ImportRulesFileFormat", + "DatabaseEngineInfo", + "ConversionWorkspace", + "BackgroundJobLogEntry", + "DatabaseEntity", + "SchemaEntity", + "TableEntity", + "ColumnEntity", + "ConstraintEntity", + "IndexEntity", + "TriggerEntity", + "ViewEntity", + "SequenceEntity", + "StoredProcedureEntity", + "FunctionEntity", + "SynonymEntity", + "PackageEntity", + "EntityMapping", + "EntityMappingLogEntry", + }, +) + + +class DatabaseEntityType(proto.Enum): + r"""The type of database entities supported, + + Values: + DATABASE_ENTITY_TYPE_UNSPECIFIED (0): + Unspecified database entity type. + DATABASE_ENTITY_TYPE_SCHEMA (1): + Schema. + DATABASE_ENTITY_TYPE_TABLE (2): + Table. + DATABASE_ENTITY_TYPE_COLUMN (3): + Column. + DATABASE_ENTITY_TYPE_CONSTRAINT (4): + Constraint. 
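A sketch of how PrivateConnection and VpcPeeringConfig feed the long-running create call on the client; the parent, IDs, network, and subnet below are placeholders:

from google.cloud import clouddms_v1


def create_private_connection():
    client = clouddms_v1.DataMigrationServiceClient()

    request = clouddms_v1.CreatePrivateConnectionRequest(
        parent="projects/my-project/locations/us-central1",
        private_connection_id="my-private-connection",
        private_connection=clouddms_v1.PrivateConnection(
            display_name="oracle-peering",
            vpc_peering_config=clouddms_v1.VpcPeeringConfig(
                vpc_name="projects/my-project/global/networks/default",
                subnet="10.124.0.0/29",  # A free /29 CIDR, per the field doc.
            ),
        ),
    )

    # create_private_connection returns a long-running operation; block on it.
    operation = client.create_private_connection(request=request)
    return operation.result()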
+ DATABASE_ENTITY_TYPE_INDEX (5): + Index. + DATABASE_ENTITY_TYPE_TRIGGER (6): + Trigger. + DATABASE_ENTITY_TYPE_VIEW (7): + View. + DATABASE_ENTITY_TYPE_SEQUENCE (8): + Sequence. + DATABASE_ENTITY_TYPE_STORED_PROCEDURE (9): + Stored Procedure. + DATABASE_ENTITY_TYPE_FUNCTION (10): + Function. + DATABASE_ENTITY_TYPE_SYNONYM (11): + Synonym. + DATABASE_ENTITY_TYPE_DATABASE_PACKAGE (12): + Package. + DATABASE_ENTITY_TYPE_UDT (13): + UDT. + DATABASE_ENTITY_TYPE_MATERIALIZED_VIEW (14): + Materialized View. + DATABASE_ENTITY_TYPE_DATABASE (15): + Database. + """ + DATABASE_ENTITY_TYPE_UNSPECIFIED = 0 + DATABASE_ENTITY_TYPE_SCHEMA = 1 + DATABASE_ENTITY_TYPE_TABLE = 2 + DATABASE_ENTITY_TYPE_COLUMN = 3 + DATABASE_ENTITY_TYPE_CONSTRAINT = 4 + DATABASE_ENTITY_TYPE_INDEX = 5 + DATABASE_ENTITY_TYPE_TRIGGER = 6 + DATABASE_ENTITY_TYPE_VIEW = 7 + DATABASE_ENTITY_TYPE_SEQUENCE = 8 + DATABASE_ENTITY_TYPE_STORED_PROCEDURE = 9 + DATABASE_ENTITY_TYPE_FUNCTION = 10 + DATABASE_ENTITY_TYPE_SYNONYM = 11 + DATABASE_ENTITY_TYPE_DATABASE_PACKAGE = 12 + DATABASE_ENTITY_TYPE_UDT = 13 + DATABASE_ENTITY_TYPE_MATERIALIZED_VIEW = 14 + DATABASE_ENTITY_TYPE_DATABASE = 15 + + +class BackgroundJobType(proto.Enum): + r"""The types of jobs that can be executed in the background. + + Values: + BACKGROUND_JOB_TYPE_UNSPECIFIED (0): + Unspecified background job type. + BACKGROUND_JOB_TYPE_SOURCE_SEED (1): + Job to seed from the source database. + BACKGROUND_JOB_TYPE_CONVERT (2): + Job to convert the source database into a + draft of the destination database. + BACKGROUND_JOB_TYPE_APPLY_DESTINATION (3): + Job to apply the draft tree onto the + destination. + BACKGROUND_JOB_TYPE_IMPORT_RULES_FILE (5): + Job to import and convert mapping rules from + an external source such as an ora2pg config + file. + """ + BACKGROUND_JOB_TYPE_UNSPECIFIED = 0 + BACKGROUND_JOB_TYPE_SOURCE_SEED = 1 + BACKGROUND_JOB_TYPE_CONVERT = 2 + BACKGROUND_JOB_TYPE_APPLY_DESTINATION = 3 + BACKGROUND_JOB_TYPE_IMPORT_RULES_FILE = 5 + + +class ImportRulesFileFormat(proto.Enum): + r"""The format for the import rules file. + + Values: + IMPORT_RULES_FILE_FORMAT_UNSPECIFIED (0): + Unspecified rules format. + IMPORT_RULES_FILE_FORMAT_HARBOUR_BRIDGE_SESSION_FILE (1): + HarbourBridge session file. + IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE (2): + Ora2Pg configuration file. + """ + IMPORT_RULES_FILE_FORMAT_UNSPECIFIED = 0 + IMPORT_RULES_FILE_FORMAT_HARBOUR_BRIDGE_SESSION_FILE = 1 + IMPORT_RULES_FILE_FORMAT_ORATOPG_CONFIG_FILE = 2 + + +class DatabaseEngineInfo(proto.Message): + r"""The type and version of a source or destination database. + + Attributes: + engine (google.cloud.clouddms_v1.types.DatabaseEngine): + Required. Engine type. + version (str): + Required. Engine named version, for example + 12.c.1. + """ + + engine: clouddms_resources.DatabaseEngine = proto.Field( + proto.ENUM, + number=1, + enum=clouddms_resources.DatabaseEngine, + ) + version: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ConversionWorkspace(proto.Message): + r"""The main conversion workspace resource entity. + + Attributes: + name (str): + Full name of the workspace resource, in the form of: + projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}. + source (google.cloud.clouddms_v1.types.DatabaseEngineInfo): + Required. The source engine details. + destination (google.cloud.clouddms_v1.types.DatabaseEngineInfo): + Required. The destination engine details. 
+ global_settings (MutableMapping[str, str]): + A generic list of settings for the workspace. The settings + are database pair dependant and can indicate default + behavior for the mapping rules engine or turn on or off + specific features. Such examples can be: + convert_foreign_key_to_interleave=true, skip_triggers=false, + ignore_non_table_synonyms=true + has_uncommitted_changes (bool): + Output only. Whether the workspace has + uncommitted changes (changes which were made + after the workspace was committed). + latest_commit_id (str): + Output only. The latest commit ID. + latest_commit_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + was committed. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + resource was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp when the workspace + resource was last updated. + display_name (str): + The display name for the workspace. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + source: "DatabaseEngineInfo" = proto.Field( + proto.MESSAGE, + number=2, + message="DatabaseEngineInfo", + ) + destination: "DatabaseEngineInfo" = proto.Field( + proto.MESSAGE, + number=3, + message="DatabaseEngineInfo", + ) + global_settings: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + has_uncommitted_changes: bool = proto.Field( + proto.BOOL, + number=5, + ) + latest_commit_id: str = proto.Field( + proto.STRING, + number=6, + ) + latest_commit_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=10, + message=timestamp_pb2.Timestamp, + ) + display_name: str = proto.Field( + proto.STRING, + number=11, + ) + + +class BackgroundJobLogEntry(proto.Message): + r"""Execution log of a background job. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + The background job log entry ID. + job_type (google.cloud.clouddms_v1.types.BackgroundJobType): + The type of job that was executed. + start_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the background job was + started. + finish_time (google.protobuf.timestamp_pb2.Timestamp): + The timestamp when the background job was + finished. + completion_state (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.JobCompletionState): + Job completion state, i.e. the final state + after the job completed. + completion_comment (str): + Job completion comment, such as how many + entities were seeded, how many warnings were + found during conversion, and similar + information. + request_autocommit (bool): + Whether the client requested the conversion + workspace to be committed after a successful + completion of the job. + seed_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.SeedJobDetails): + Seed job details. + + This field is a member of `oneof`_ ``job_details``. 
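The ConversionWorkspace fields above anchor the new Oracle-to-PostgreSQL flow: pair a source and destination DatabaseEngineInfo and create the workspace. A hedged sketch; the parent, workspace ID, and version strings are placeholders:

from google.cloud import clouddms_v1


def create_workspace():
    client = clouddms_v1.DataMigrationServiceClient()

    workspace = clouddms_v1.ConversionWorkspace(
        display_name="oracle-to-pg-workspace",
        source=clouddms_v1.DatabaseEngineInfo(
            engine=clouddms_v1.DatabaseEngine.ORACLE,
            version="19c",  # Placeholder engine version string.
        ),
        destination=clouddms_v1.DatabaseEngineInfo(
            engine=clouddms_v1.DatabaseEngine.POSTGRESQL,
            version="14",
        ),
    )

    # Long-running operation; the result is the created workspace.
    operation = client.create_conversion_workspace(
        request=clouddms_v1.CreateConversionWorkspaceRequest(
            parent="projects/my-project/locations/us-central1",
            conversion_workspace_id="my-workspace",
            conversion_workspace=workspace,
        )
    )
    return operation.result()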
+ import_rules_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ImportRulesJobDetails): + Import rules job details. + + This field is a member of `oneof`_ ``job_details``. + convert_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ConvertJobDetails): + Convert job details. + + This field is a member of `oneof`_ ``job_details``. + apply_job_details (google.cloud.clouddms_v1.types.BackgroundJobLogEntry.ApplyJobDetails): + Apply job details. + + This field is a member of `oneof`_ ``job_details``. + """ + + class JobCompletionState(proto.Enum): + r"""Final state after a job completes. + + Values: + JOB_COMPLETION_STATE_UNSPECIFIED (0): + The status is not specified. This state is + used when job is not yet finished. + SUCCEEDED (1): + Success. + FAILED (2): + Error. + """ + JOB_COMPLETION_STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + + class SeedJobDetails(proto.Message): + r"""Details regarding a Seed background job. + + Attributes: + connection_profile (str): + The connection profile which was used for the + seed job. + """ + + connection_profile: str = proto.Field( + proto.STRING, + number=1, + ) + + class ImportRulesJobDetails(proto.Message): + r"""Details regarding an Import Rules background job. + + Attributes: + files (MutableSequence[str]): + File names used for the import rules job. + file_format (google.cloud.clouddms_v1.types.ImportRulesFileFormat): + The requested file format. + """ + + files: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + file_format: "ImportRulesFileFormat" = proto.Field( + proto.ENUM, + number=2, + enum="ImportRulesFileFormat", + ) + + class ConvertJobDetails(proto.Message): + r"""Details regarding a Convert background job. + + Attributes: + filter (str): + AIP-160 based filter used to specify the + entities to convert + """ + + filter: str = proto.Field( + proto.STRING, + number=1, + ) + + class ApplyJobDetails(proto.Message): + r"""Details regarding an Apply background job. + + Attributes: + connection_profile (str): + The connection profile which was used for the + apply job. 
+ filter (str): + AIP-160 based filter used to specify the + entities to apply + """ + + connection_profile: str = proto.Field( + proto.STRING, + number=1, + ) + filter: str = proto.Field( + proto.STRING, + number=2, + ) + + id: str = proto.Field( + proto.STRING, + number=1, + ) + job_type: "BackgroundJobType" = proto.Field( + proto.ENUM, + number=2, + enum="BackgroundJobType", + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + finish_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + completion_state: JobCompletionState = proto.Field( + proto.ENUM, + number=5, + enum=JobCompletionState, + ) + completion_comment: str = proto.Field( + proto.STRING, + number=6, + ) + request_autocommit: bool = proto.Field( + proto.BOOL, + number=7, + ) + seed_job_details: SeedJobDetails = proto.Field( + proto.MESSAGE, + number=100, + oneof="job_details", + message=SeedJobDetails, + ) + import_rules_job_details: ImportRulesJobDetails = proto.Field( + proto.MESSAGE, + number=101, + oneof="job_details", + message=ImportRulesJobDetails, + ) + convert_job_details: ConvertJobDetails = proto.Field( + proto.MESSAGE, + number=102, + oneof="job_details", + message=ConvertJobDetails, + ) + apply_job_details: ApplyJobDetails = proto.Field( + proto.MESSAGE, + number=103, + oneof="job_details", + message=ApplyJobDetails, + ) + + +class DatabaseEntity(proto.Message): + r"""The base entity type for all the database related entities. + The message contains the entity name, the name of its parent, + the entity type, and the specific details per entity type. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + short_name (str): + The short name (e.g. table name) of the + entity. + parent_entity (str): + The full name of the parent entity (e.g. + schema name). + tree (google.cloud.clouddms_v1.types.DatabaseEntity.TreeType): + The type of tree the entity belongs to. + entity_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + The type of the database entity (table, view, + index, ...). + mappings (MutableSequence[google.cloud.clouddms_v1.types.EntityMapping]): + Details about entity mappings. + For source tree entities, this holds the draft + entities which were generated by the mapping + rules. + For draft tree entities, this holds the source + entities which were converted to form the draft + entity. + Destination entities will have no mapping + details. + schema (google.cloud.clouddms_v1.types.SchemaEntity): + Schema. + + This field is a member of `oneof`_ ``entity_body``. + table (google.cloud.clouddms_v1.types.TableEntity): + Table. + + This field is a member of `oneof`_ ``entity_body``. + view (google.cloud.clouddms_v1.types.ViewEntity): + View. + + This field is a member of `oneof`_ ``entity_body``. + sequence (google.cloud.clouddms_v1.types.SequenceEntity): + Sequence. + + This field is a member of `oneof`_ ``entity_body``. + stored_procedure (google.cloud.clouddms_v1.types.StoredProcedureEntity): + Stored procedure. + + This field is a member of `oneof`_ ``entity_body``. + database_function (google.cloud.clouddms_v1.types.FunctionEntity): + Function. 
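Because the job-detail messages share the ``job_details`` oneof, a log consumer can branch on whichever member is set, for example over entries returned by search_background_jobs. A sketch assuming, per the generated types, that the request takes the workspace name and the response exposes a ``jobs`` list; the workspace name is a placeholder:

from google.cloud import clouddms_v1

client = clouddms_v1.DataMigrationServiceClient()
response = client.search_background_jobs(
    request=clouddms_v1.SearchBackgroundJobsRequest(
        conversion_workspace="projects/my-project/locations/us-central1/conversionWorkspaces/my-workspace",
    )
)

for job in response.jobs:
    # proto-plus supports `in` checks for oneof members.
    if "seed_job_details" in job:
        print("seeded from:", job.seed_job_details.connection_profile)
    elif "apply_job_details" in job:
        print("applied to:", job.apply_job_details.connection_profile)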
+ + This field is a member of `oneof`_ ``entity_body``. + synonym (google.cloud.clouddms_v1.types.SynonymEntity): + Synonym. + + This field is a member of `oneof`_ ``entity_body``. + database_package (google.cloud.clouddms_v1.types.PackageEntity): + Package. + + This field is a member of `oneof`_ ``entity_body``. + """ + + class TreeType(proto.Enum): + r"""The type of database entities tree. + + Values: + TREE_TYPE_UNSPECIFIED (0): + Tree type unspecified. + SOURCE (1): + Tree of entities loaded from a source + database. + DRAFT (2): + Tree of entities converted from the source + tree using the mapping rules. + DESTINATION (3): + Tree of entities observed on the destination + database. + """ + TREE_TYPE_UNSPECIFIED = 0 + SOURCE = 1 + DRAFT = 2 + DESTINATION = 3 + + short_name: str = proto.Field( + proto.STRING, + number=1, + ) + parent_entity: str = proto.Field( + proto.STRING, + number=2, + ) + tree: TreeType = proto.Field( + proto.ENUM, + number=3, + enum=TreeType, + ) + entity_type: "DatabaseEntityType" = proto.Field( + proto.ENUM, + number=4, + enum="DatabaseEntityType", + ) + mappings: MutableSequence["EntityMapping"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="EntityMapping", + ) + schema: "SchemaEntity" = proto.Field( + proto.MESSAGE, + number=102, + oneof="entity_body", + message="SchemaEntity", + ) + table: "TableEntity" = proto.Field( + proto.MESSAGE, + number=103, + oneof="entity_body", + message="TableEntity", + ) + view: "ViewEntity" = proto.Field( + proto.MESSAGE, + number=104, + oneof="entity_body", + message="ViewEntity", + ) + sequence: "SequenceEntity" = proto.Field( + proto.MESSAGE, + number=105, + oneof="entity_body", + message="SequenceEntity", + ) + stored_procedure: "StoredProcedureEntity" = proto.Field( + proto.MESSAGE, + number=106, + oneof="entity_body", + message="StoredProcedureEntity", + ) + database_function: "FunctionEntity" = proto.Field( + proto.MESSAGE, + number=107, + oneof="entity_body", + message="FunctionEntity", + ) + synonym: "SynonymEntity" = proto.Field( + proto.MESSAGE, + number=108, + oneof="entity_body", + message="SynonymEntity", + ) + database_package: "PackageEntity" = proto.Field( + proto.MESSAGE, + number=109, + oneof="entity_body", + message="PackageEntity", + ) + + +class SchemaEntity(proto.Message): + r"""Schema typically has no parent entity, but can have a parent + entity DatabaseInstance (for database engines which support it). + For some database engines, the terms schema and user can be + used interchangeably when they refer to a namespace or a + collection of other database entities. Can store additional + information which is schema specific. + + Attributes: + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + +class TableEntity(proto.Message): + r"""Table's parent is a schema. + + Attributes: + columns (MutableSequence[google.cloud.clouddms_v1.types.ColumnEntity]): + Table columns. + constraints (MutableSequence[google.cloud.clouddms_v1.types.ConstraintEntity]): + Table constraints. + indices (MutableSequence[google.cloud.clouddms_v1.types.IndexEntity]): + Table indices. + triggers (MutableSequence[google.cloud.clouddms_v1.types.TriggerEntity]): + Table triggers. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + comment (str): + Comment associated with the table. 
+ """ + + columns: MutableSequence["ColumnEntity"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="ColumnEntity", + ) + constraints: MutableSequence["ConstraintEntity"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="ConstraintEntity", + ) + indices: MutableSequence["IndexEntity"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="IndexEntity", + ) + triggers: MutableSequence["TriggerEntity"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="TriggerEntity", + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + comment: str = proto.Field( + proto.STRING, + number=6, + ) + + +class ColumnEntity(proto.Message): + r"""Column is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + Column name. + data_type (str): + Column data type. + charset (str): + Charset override - instead of table level + charset. + collation (str): + Collation override - instead of table level + collation. + length (int): + Column length - e.g. varchar (50). + precision (int): + Column precision - when relevant. + scale (int): + Column scale - when relevant. + fractional_seconds_precision (int): + Column fractional second precision - used for + timestamp based datatypes. + array (bool): + Is the column of array type. + array_length (int): + If the column is array, of which length. + nullable (bool): + Is the column nullable. + auto_generated (bool): + Is the column auto-generated/identity. + udt (bool): + Is the column a UDT. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + set_values (MutableSequence[str]): + Specifies the list of values allowed in the + column. Only used for set data type. + comment (str): + Comment associated with the column. + ordinal_position (int): + Column order in the table. + default_value (str): + Default value of the column. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + data_type: str = proto.Field( + proto.STRING, + number=2, + ) + charset: str = proto.Field( + proto.STRING, + number=3, + ) + collation: str = proto.Field( + proto.STRING, + number=4, + ) + length: int = proto.Field( + proto.INT64, + number=5, + ) + precision: int = proto.Field( + proto.INT32, + number=6, + ) + scale: int = proto.Field( + proto.INT32, + number=7, + ) + fractional_seconds_precision: int = proto.Field( + proto.INT32, + number=8, + ) + array: bool = proto.Field( + proto.BOOL, + number=9, + ) + array_length: int = proto.Field( + proto.INT32, + number=10, + ) + nullable: bool = proto.Field( + proto.BOOL, + number=11, + ) + auto_generated: bool = proto.Field( + proto.BOOL, + number=12, + ) + udt: bool = proto.Field( + proto.BOOL, + number=13, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=14, + message=struct_pb2.Struct, + ) + set_values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=15, + ) + comment: str = proto.Field( + proto.STRING, + number=16, + ) + ordinal_position: int = proto.Field( + proto.INT32, + number=17, + ) + default_value: str = proto.Field( + proto.STRING, + number=18, + ) + + +class ConstraintEntity(proto.Message): + r"""Constraint is not used as an independent entity, it is + retrieved as part of another entity such as Table or View. + + Attributes: + name (str): + The name of the table constraint. 
+ type_ (str): + Type of constraint, for example unique, + primary key, foreign key (currently only primary + key is supported). + table_columns (MutableSequence[str]): + Table columns used as part of the Constraint, + for example primary key constraint should list + the columns which constitutes the key. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + reference_columns (MutableSequence[str]): + Reference columns which may be associated with the + constraint. For example, if the constraint is a FOREIGN_KEY, + this represents the list of full names of referenced columns + by the foreign key. + reference_table (str): + Reference table which may be associated with the constraint. + For example, if the constraint is a FOREIGN_KEY, this + represents the list of full name of the referenced table by + the foreign key. + table_name (str): + Table which is associated with the constraint. In case the + constraint is defined on a table, this field is left empty + as this information is stored in parent_name. However, if + constraint is defined on a view, this field stores the table + name on which the view is defined. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + table_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=4, + message=struct_pb2.Struct, + ) + reference_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + reference_table: str = proto.Field( + proto.STRING, + number=6, + ) + table_name: str = proto.Field( + proto.STRING, + number=7, + ) + + +class IndexEntity(proto.Message): + r"""Index is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + The name of the index. + type_ (str): + Type of index, for example B-TREE. + table_columns (MutableSequence[str]): + Table columns used as part of the Index, for + example B-TREE index should list the columns + which constitutes the index. + unique (bool): + Boolean value indicating whether the index is + unique. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: str = proto.Field( + proto.STRING, + number=2, + ) + table_columns: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + unique: bool = proto.Field( + proto.BOOL, + number=4, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=5, + message=struct_pb2.Struct, + ) + + +class TriggerEntity(proto.Message): + r"""Trigger is not used as an independent entity, it is retrieved + as part of a Table entity. + + Attributes: + name (str): + The name of the trigger. + triggering_events (MutableSequence[str]): + The DML, DDL, or database events that fire + the trigger, for example INSERT, UPDATE. + trigger_type (str): + Indicates when the trigger fires, for example + BEFORE STATEMENT, AFTER EACH ROW. + sql_code (str): + The SQL code which creates the trigger. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. 
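Since columns and constraints are only reachable through a parent entity, a draft table is naturally expressed as one nested DatabaseEntity. A self-contained sketch; all names and type strings are illustrative:

from google.cloud import clouddms_v1

table = clouddms_v1.DatabaseEntity(
    short_name="employees",
    parent_entity="hr",
    tree=clouddms_v1.DatabaseEntity.TreeType.DRAFT,
    entity_type=clouddms_v1.DatabaseEntityType.DATABASE_ENTITY_TYPE_TABLE,
    table=clouddms_v1.TableEntity(
        columns=[
            clouddms_v1.ColumnEntity(
                name="id",
                data_type="bigint",
                nullable=False,
                ordinal_position=1,
            ),
        ],
        constraints=[
            clouddms_v1.ConstraintEntity(
                name="employees_pk",
                type_="PRIMARY KEY",
                table_columns=["id"],
            ),
        ],
    ),
)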
+    """
+
+    name: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    triggering_events: MutableSequence[str] = proto.RepeatedField(
+        proto.STRING,
+        number=2,
+    )
+    trigger_type: str = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+    sql_code: str = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    custom_features: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        message=struct_pb2.Struct,
+    )
+
+
+class ViewEntity(proto.Message):
+    r"""View's parent is a schema.
+
+    Attributes:
+        sql_code (str):
+            The SQL code which creates the view.
+        custom_features (google.protobuf.struct_pb2.Struct):
+            Custom engine specific features.
+        constraints (MutableSequence[google.cloud.clouddms_v1.types.ConstraintEntity]):
+            View constraints.
+    """
+
+    sql_code: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    custom_features: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=struct_pb2.Struct,
+    )
+    constraints: MutableSequence["ConstraintEntity"] = proto.RepeatedField(
+        proto.MESSAGE,
+        number=3,
+        message="ConstraintEntity",
+    )
+
+
+class SequenceEntity(proto.Message):
+    r"""Sequence's parent is a schema.
+
+    Attributes:
+        increment (int):
+            Increment value for the sequence.
+        start_value (bytes):
+            Start number for the sequence, represented as
+            bytes to accommodate large numbers.
+        max_value (bytes):
+            Maximum number for the sequence, represented
+            as bytes to accommodate large numbers.
+        min_value (bytes):
+            Minimum number for the sequence, represented
+            as bytes to accommodate large numbers.
+        cycle (bool):
+            Indicates whether the sequence value should
+            cycle through.
+        cache (int):
+            Indicates the number of entries to cache /
+            precreate.
+        custom_features (google.protobuf.struct_pb2.Struct):
+            Custom engine specific features.
+    """
+
+    increment: int = proto.Field(
+        proto.INT64,
+        number=1,
+    )
+    start_value: bytes = proto.Field(
+        proto.BYTES,
+        number=2,
+    )
+    max_value: bytes = proto.Field(
+        proto.BYTES,
+        number=3,
+    )
+    min_value: bytes = proto.Field(
+        proto.BYTES,
+        number=4,
+    )
+    cycle: bool = proto.Field(
+        proto.BOOL,
+        number=5,
+    )
+    cache: int = proto.Field(
+        proto.INT64,
+        number=6,
+    )
+    custom_features: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=7,
+        message=struct_pb2.Struct,
+    )
+
+
+class StoredProcedureEntity(proto.Message):
+    r"""Stored procedure's parent is a schema.
+
+    Attributes:
+        sql_code (str):
+            The SQL code which creates the stored
+            procedure.
+        custom_features (google.protobuf.struct_pb2.Struct):
+            Custom engine specific features.
+    """
+
+    sql_code: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    custom_features: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=struct_pb2.Struct,
+    )
+
+
+class FunctionEntity(proto.Message):
+    r"""Function's parent is a schema.
+
+    Attributes:
+        sql_code (str):
+            The SQL code which creates the function.
+        custom_features (google.protobuf.struct_pb2.Struct):
+            Custom engine specific features.
+    """
+
+    sql_code: str = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    custom_features: struct_pb2.Struct = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=struct_pb2.Struct,
+    )
+
+
+class SynonymEntity(proto.Message):
+    r"""Synonym's parent is a schema.
+
+    Attributes:
+        source_entity (str):
+            The name of the entity for which the synonym
+            is being created (the source).
+        source_type (google.cloud.clouddms_v1.types.DatabaseEntityType):
+            The type of the entity for which the synonym
+            is being created (usually a table or a
+            sequence).
+ custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + source_entity: str = proto.Field( + proto.STRING, + number=1, + ) + source_type: "DatabaseEntityType" = proto.Field( + proto.ENUM, + number=2, + enum="DatabaseEntityType", + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class PackageEntity(proto.Message): + r"""Package's parent is a schema. + + Attributes: + package_sql_code (str): + The SQL code which creates the package. + package_body (str): + The SQL code which creates the package body. + If the package specification has cursors or + subprograms, then the package body is mandatory. + custom_features (google.protobuf.struct_pb2.Struct): + Custom engine specific features. + """ + + package_sql_code: str = proto.Field( + proto.STRING, + number=1, + ) + package_body: str = proto.Field( + proto.STRING, + number=2, + ) + custom_features: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +class EntityMapping(proto.Message): + r"""Details of the mappings of a database entity. + + Attributes: + source_entity (str): + Source entity full name. + The source entity can also be a column, index or + constraint using the same naming notation + schema.table.column. + draft_entity (str): + Target entity full name. + The draft entity can also include a column, + index or constraint using the same naming + notation schema.table.column. + source_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + Type of source entity. + draft_type (google.cloud.clouddms_v1.types.DatabaseEntityType): + Type of draft entity. + mapping_log (MutableSequence[google.cloud.clouddms_v1.types.EntityMappingLogEntry]): + Entity mapping log entries. + Multiple rules can be effective and contribute + changes to a converted entity, such as a rule + can handle the entity name, another rule can + handle an entity type. In addition, rules which + did not change the entity are also logged along + with the reason preventing them to do so. + """ + + source_entity: str = proto.Field( + proto.STRING, + number=1, + ) + draft_entity: str = proto.Field( + proto.STRING, + number=2, + ) + source_type: "DatabaseEntityType" = proto.Field( + proto.ENUM, + number=4, + enum="DatabaseEntityType", + ) + draft_type: "DatabaseEntityType" = proto.Field( + proto.ENUM, + number=5, + enum="DatabaseEntityType", + ) + mapping_log: MutableSequence["EntityMappingLogEntry"] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="EntityMappingLogEntry", + ) + + +class EntityMappingLogEntry(proto.Message): + r"""A single record of a rule which was used for a mapping. + + Attributes: + rule_id (str): + Which rule caused this log entry. + rule_revision_id (str): + Rule revision ID. + mapping_comment (str): + Comment. 
+ """ + + rule_id: str = proto.Field( + proto.STRING, + number=1, + ) + rule_revision_id: str = proto.Field( + proto.STRING, + number=2, + ) + mapping_comment: str = proto.Field( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py new file mode 100644 index 0000000..62f65c2 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py new file mode 100644 index 0000000..d871897 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ApplyConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_apply_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ApplyConversionWorkspaceRequest( + connection_profile="connection_profile_value", + name="name_value", + ) + + # Make the request + operation = client.apply_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py new file mode 100644 index 0000000..ffb4df7 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CommitConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
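+# The *_async.py samples in this patch only define coroutines; nothing runs
+# them. A plausible driver, assuming it is appended to the apply async sample
+# above (asyncio is standard library; the function name matches that sample):
+import asyncio
+
+if __name__ == "__main__":
+    asyncio.run(sample_apply_conversion_workspace())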
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_commit_conversion_workspace():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = clouddms_v1.CommitConversionWorkspaceRequest(
+        name="name_value",
+    )
+
+    # Make the request; awaiting the call returns the long-running operation
+    operation = await client.commit_conversion_workspace(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # result() is itself a coroutine on async operations and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py
new file mode 100644
index 0000000..7139db3
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CommitConversionWorkspace
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_commit_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.CommitConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.commit_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py new file mode 100644 index 0000000..2fbb94d --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ConvertConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_convert_conversion_workspace():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = clouddms_v1.ConvertConversionWorkspaceRequest(
+    )
+
+    # Make the request; awaiting the call returns the long-running operation
+    operation = await client.convert_conversion_workspace(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # result() is itself a coroutine on async operations and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py
new file mode 100644
index 0000000..945b7aa
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for ConvertConversionWorkspace
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_convert_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ConvertConversionWorkspaceRequest( + ) + + # Make the request + operation = client.convert_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py new file mode 100644 index 0000000..d8a0466 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
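+# The convert samples above leave the request empty for brevity. A more
+# realistic request would at least name the workspace; the resource path
+# below is a placeholder, and auto_commit is assumed here to be a request
+# field in this API version:
+from google.cloud import clouddms_v1
+
+request = clouddms_v1.ConvertConversionWorkspaceRequest(
+    name="projects/my-project/locations/us-central1/conversionWorkspaces/my-workspace",
+    auto_commit=True,  # commit the converted draft automatically on success
+)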
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_create_conversion_workspace():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    conversion_workspace = clouddms_v1.ConversionWorkspace()
+    conversion_workspace.source.engine = "ORACLE"
+    conversion_workspace.source.version = "version_value"
+    conversion_workspace.destination.engine = "ORACLE"
+    conversion_workspace.destination.version = "version_value"
+
+    request = clouddms_v1.CreateConversionWorkspaceRequest(
+        parent="parent_value",
+        conversion_workspace_id="conversion_workspace_id_value",
+        conversion_workspace=conversion_workspace,
+    )
+
+    # Make the request; awaiting the call returns the long-running operation
+    operation = await client.create_conversion_workspace(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # result() is itself a coroutine on async operations and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py
new file mode 100644
index 0000000..0288f42
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreateConversionWorkspace
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.CreateConversionWorkspaceRequest( + parent="parent_value", + conversion_workspace_id="conversion_workspace_id_value", + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.create_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py new file mode 100644 index 0000000..eb459b7 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreatePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
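+# The create samples above use "ORACLE" for both engines as a generated
+# placeholder. For the Oracle-to-PostgreSQL path this release adds, a
+# workspace would plausibly pair the two engines via the DatabaseEngine
+# enum; the version strings below are placeholders:
+from google.cloud import clouddms_v1
+
+conversion_workspace = clouddms_v1.ConversionWorkspace(
+    source=clouddms_v1.DatabaseEngineInfo(
+        engine=clouddms_v1.DatabaseEngine.ORACLE,
+        version="19c",
+    ),
+    destination=clouddms_v1.DatabaseEngineInfo(
+        engine=clouddms_v1.DatabaseEngine.POSTGRESQL,
+        version="15",
+    ),
+)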
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_create_private_connection():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    private_connection = clouddms_v1.PrivateConnection()
+    private_connection.vpc_peering_config.vpc_name = "vpc_name_value"
+    private_connection.vpc_peering_config.subnet = "subnet_value"
+
+    request = clouddms_v1.CreatePrivateConnectionRequest(
+        parent="parent_value",
+        private_connection_id="private_connection_id_value",
+        private_connection=private_connection,
+    )
+
+    # Make the request; awaiting the call returns the long-running operation
+    operation = await client.create_private_connection(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # result() is itself a coroutine on async operations and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py
new file mode 100644
index 0000000..f1b9e9f
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_create_private_connection_sync.py
@@ -0,0 +1,62 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for CreatePrivateConnection
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_create_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + private_connection = clouddms_v1.PrivateConnection() + private_connection.vpc_peering_config.vpc_name = "vpc_name_value" + private_connection.vpc_peering_config.subnet = "subnet_value" + + request = clouddms_v1.CreatePrivateConnectionRequest( + parent="parent_value", + private_connection_id="private_connection_id_value", + private_connection=private_connection, + ) + + # Make the request + operation = client.create_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py new file mode 100644 index 0000000..1472d44 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
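+# For any long-running call in these samples, result() accepts a timeout and
+# raises if the operation fails server-side. A defensive sketch, where
+# `operation` stands for the future returned by the sync samples above and
+# google.api_core is already a dependency of this library:
+from google.api_core import exceptions
+
+try:
+    response = operation.result(timeout=600)  # poll for up to 10 minutes
+except exceptions.GoogleAPICallError as err:
+    print(f"operation failed: {err}")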
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_delete_conversion_workspace():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = clouddms_v1.DeleteConversionWorkspaceRequest(
+        name="name_value",
+    )
+
+    # Make the request; awaiting the call returns the long-running operation
+    operation = await client.delete_conversion_workspace(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # result() is itself a coroutine on async operations and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py
new file mode 100644
index 0000000..0404763
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeleteConversionWorkspace
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeleteConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py new file mode 100644 index 0000000..adaa51c --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeletePrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_delete_private_connection():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = clouddms_v1.DeletePrivateConnectionRequest(
+        name="name_value",
+    )
+
+    # Make the request; awaiting the call returns the long-running operation
+    operation = await client.delete_private_connection(request=request)
+
+    print("Waiting for operation to complete...")
+
+    # result() is itself a coroutine on async operations and must be awaited
+    response = await operation.result()
+
+    # Handle the response
+    print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py
new file mode 100644
index 0000000..841c724
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DeletePrivateConnection
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_delete_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DeletePrivateConnectionRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_private_connection(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py new file mode 100644 index 0000000..e4cd078 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DescribeConversionWorkspaceRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py new file mode 100644 index 0000000..1952791 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DescribeConversionWorkspaceRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_describe_conversion_workspace_revisions(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.describe_conversion_workspace_revisions(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py new file mode 100644 index 0000000..61ced4f --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DescribeDatabaseEntities +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
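+# The revisions request above returns every revision of the workspace; the
+# same request message also carries a commit_id field for pinning the call
+# to a single revision (both values below are placeholders):
+from google.cloud import clouddms_v1
+
+request = clouddms_v1.DescribeConversionWorkspaceRevisionsRequest(
+    conversion_workspace="projects/my-project/locations/us-central1/conversionWorkspaces/my-workspace",
+    commit_id="commit_id_value",
+)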
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_describe_database_entities():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = clouddms_v1.DescribeDatabaseEntitiesRequest(
+        conversion_workspace="conversion_workspace_value",
+    )
+
+    # Make the request; the async call must be awaited to obtain the pager
+    page_result = await client.describe_database_entities(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py
new file mode 100644
index 0000000..25821ef
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for DescribeDatabaseEntities
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_describe_database_entities(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.DescribeDatabaseEntitiesRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + page_result = client.describe_database_entities(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py new file mode 100644 index 0000000..a1b178b --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FetchStaticIps +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_FetchStaticIps_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
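+# Paged responses such as describe_database_entities can also be walked a
+# page at a time rather than entity by entity; `pages` is the standard gapic
+# pager attribute, `page_result` is the sync pager from the sample above, and
+# database_entities / next_page_token are the response fields assumed here:
+for page in page_result.pages:
+    print(len(page.database_entities), page.next_page_token)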
+# - It may require specifying regional endpoints when creating the service
+#   client as shown in:
+#   https://googleapis.dev/python/google-api-core/latest/client_options.html
+from google.cloud import clouddms_v1
+
+
+async def sample_fetch_static_ips():
+    # Create a client
+    client = clouddms_v1.DataMigrationServiceAsyncClient()
+
+    # Initialize request argument(s)
+    request = clouddms_v1.FetchStaticIpsRequest(
+        name="name_value",
+    )
+
+    # Make the request; the async call must be awaited to obtain the pager
+    page_result = await client.fetch_static_ips(request=request)
+
+    # Handle the response
+    async for response in page_result:
+        print(response)
+
+# [END datamigration_v1_generated_DataMigrationService_FetchStaticIps_async]
diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py
new file mode 100644
index 0000000..563e9f1
--- /dev/null
+++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py
@@ -0,0 +1,53 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Generated code. DO NOT EDIT!
+#
+# Snippet for FetchStaticIps
+# NOTE: This snippet has been automatically generated for illustrative purposes only.
+# It may require modifications to work in your environment.
+
+# To install the latest published package dependency, execute the following:
+#   python3 -m pip install google-cloud-dms
+
+
+# [START datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync]
+# This snippet has been automatically generated and should be regarded as a
+# code template only.
+# It will require modifications to work:
+# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_fetch_static_ips(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.FetchStaticIpsRequest( + name="name_value", + ) + + # Make the request + page_result = client.fetch_static_ips(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py new file mode 100644 index 0000000..d15aa5c --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = await client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py new file mode 100644 index 0000000..0ba92c8 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + response = client.get_conversion_workspace(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py new file mode 100644 index 0000000..fb0a8b3 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_private_connection(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py new file mode 100644 index 0000000..eacb889 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_get_private_connection_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetPrivateConnection +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_get_private_connection(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.GetPrivateConnectionRequest( + name="name_value", + ) + + # Make the request + response = client.get_private_connection(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py new file mode 100644 index 0000000..8234d57 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ImportMappingRules_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = await client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ImportMappingRules_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py new file mode 100644 index 0000000..b5cbb03 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ImportMappingRules +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_import_mapping_rules(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ImportMappingRulesRequest( + parent="parent_value", + ) + + # Make the request + operation = client.import_mapping_rules(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py new file mode 100644 index 0000000..aa6d6b2 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversionWorkspaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
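[Editor's note] In the sync long-running samples, operation.result() blocks until the operation finishes or fails. google.api_core's Operation.result() also accepts a timeout in seconds; a sketch continuing from the sync sample above:

    # Bound the wait; raises concurrent.futures.TimeoutError if the
    # operation is still running when the deadline passes.
    operation = client.import_mapping_rules(request=request)
    response = operation.result(timeout=300)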
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_conversion_workspaces(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py new file mode 100644 index 0000000..e9e5c74 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListConversionWorkspaces +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_conversion_workspaces(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListConversionWorkspacesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_conversion_workspaces(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py new file mode 100644 index 0000000..0149e98 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPrivateConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
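[Editor's note] The pager returned by the list methods fetches further pages transparently while you iterate items; it also exposes a pages view when you want one RPC's worth of results at a time. A sketch continuing from the sync listing sample, where the conversion_workspaces field name is the conventional AIP-132 plural and should be checked against the published response type:

    page_result = client.list_conversion_workspaces(request=request)
    for page in page_result.pages:  # one underlying RPC per page
        for workspace in page.conversion_workspaces:  # assumed field name
            print(workspace.name)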
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = await client.list_private_connections(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py new file mode 100644 index 0000000..5eec2f6 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_list_private_connections_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListPrivateConnections +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_list_private_connections(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.ListPrivateConnectionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_private_connections(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py new file mode 100644 index 0000000..c9e922c --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
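[Editor's note] List requests in this surface follow the standard pagination shape, so each page can be capped per request; a sketch, assuming the standard page_size field:

    request = clouddms_v1.ListPrivateConnectionsRequest(
        parent="parent_value",
        page_size=50,  # assumed standard AIP-158 field; caps items per page
    )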
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = await client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py new file mode 100644 index 0000000..e8952f3 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_rollback_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.RollbackConversionWorkspaceRequest( + name="name_value", + ) + + # Make the request + operation = client.rollback_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py new file mode 100644 index 0000000..74794d4 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchBackgroundJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
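[Editor's note] Before blocking on result(), the returned google.api_core Operation can be inspected or cancelled; a sketch continuing from the rollback sync sample above:

    operation = client.rollback_conversion_workspace(request=request)
    print(operation.metadata)  # service-specific OperationMetadata, if populated
    if not operation.done():
        operation.cancel()  # best-effort; not every operation supports cancellation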
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = await client.search_background_jobs(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py new file mode 100644 index 0000000..4ab3279 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchBackgroundJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_search_background_jobs(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SearchBackgroundJobsRequest( + conversion_workspace="conversion_workspace_value", + ) + + # Make the request + response = client.search_background_jobs(request=request) + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py new file mode 100644 index 0000000..46bdf92 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SeedConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
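[Editor's note] Unlike the listing methods, SearchBackgroundJobs is a plain unary call: no pager and no long-running operation. The request can also narrow the search; the two optional field names below are assumptions from the v1 proto and should be verified against the published types:

    request = clouddms_v1.SearchBackgroundJobsRequest(
        conversion_workspace="conversion_workspace_value",
        return_most_recent_per_job_type=True,  # assumed field name
        max_size=10,  # assumed field name
    )
    response = client.search_background_jobs(request=request)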
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = await client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py new file mode 100644 index 0000000..647c3d2 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SeedConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_seed_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + request = clouddms_v1.SeedConversionWorkspaceRequest( + source_connection_profile="source_connection_profile_value", + ) + + # Make the request + operation = client.seed_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py new file mode 100644 index 0000000..3361ce9 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
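[Editor's note] SeedConversionWorkspace pulls schema metadata from one connection profile into the workspace. A sketch with two additional request fields, both assumptions to verify against the published SeedConversionWorkspaceRequest type:

    request = clouddms_v1.SeedConversionWorkspaceRequest(
        name="name_value",  # assumed field: the conversion workspace to seed
        source_connection_profile="source_connection_profile_value",
        auto_commit=True,  # assumed field: commit the seeded snapshot in one step
    )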
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +async def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceAsyncClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = await client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = await operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async] diff --git a/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py b/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py new file mode 100644 index 0000000..2a1ba33 --- /dev/null +++ b/samples/generated_samples/datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateConversionWorkspace +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dms + + +# [START datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization.
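[Editor's note] Update requests in GAPIC surfaces conventionally carry a google.protobuf.FieldMask naming the fields to change. A sketch continuing from the update samples, where the display_name path is an assumption about the ConversionWorkspace message:

    from google.protobuf import field_mask_pb2

    request = clouddms_v1.UpdateConversionWorkspaceRequest(
        conversion_workspace=conversion_workspace,
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),  # assumed path
    )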
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import clouddms_v1 + + +def sample_update_conversion_workspace(): + # Create a client + client = clouddms_v1.DataMigrationServiceClient() + + # Initialize request argument(s) + conversion_workspace = clouddms_v1.ConversionWorkspace() + conversion_workspace.source.engine = "ORACLE" + conversion_workspace.source.version = "version_value" + conversion_workspace.destination.engine = "ORACLE" + conversion_workspace.destination.version = "version_value" + + request = clouddms_v1.UpdateConversionWorkspaceRequest( + conversion_workspace=conversion_workspace, + ) + + # Make the request + operation = client.update_conversion_workspace(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync] diff --git a/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json index 94aa0fb..10d2ba4 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-dms", - "version": "1.6.2" + "version": "0.1.0" }, "snippets": [ { @@ -19,31 +19,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.apply_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateConnectionProfile" + "shortName": "ApplyConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" - }, - { - "name": "connection_profile_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" }, { "name": "retry", @@ -59,21 +47,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_connection_profile" + "shortName": "apply_conversion_workspace" }, - "description": "Sample for CreateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py", + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_async", "segments": [ { - "end": 63, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 56, "start": 27, "type": "SHORT" }, @@ 
-83,22 +71,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, - "start": 54, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py" + "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_async.py" }, { "canonical": true, @@ -107,31 +95,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.apply_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ApplyConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateConnectionProfile" + "shortName": "ApplyConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "connection_profile", - "type": "google.cloud.clouddms_v1.types.ConnectionProfile" - }, - { - "name": "connection_profile_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ApplyConversionWorkspaceRequest" }, { "name": "retry", @@ -147,21 +123,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_connection_profile" + "shortName": "apply_conversion_workspace" }, - "description": "Sample for CreateConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py", + "description": "Sample for ApplyConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ApplyConversionWorkspace_sync", "segments": [ { - "end": 63, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 56, "start": 27, "type": "SHORT" }, @@ -171,22 +147,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, - "start": 54, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py" + "title": "datamigration_v1_generated_data_migration_service_apply_conversion_workspace_sync.py" }, { "canonical": true, @@ -196,31 +172,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.commit_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateMigrationJob" + "shortName": "CommitConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "migration_job_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" }, { "name": "retry", @@ -236,21 +200,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_migration_job" + "shortName": "commit_conversion_workspace" }, - "description": "Sample for CreateMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py", + "description": "Sample for CommitConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_async", "segments": [ { - "end": 64, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 64, + "end": 55, "start": 27, "type": "SHORT" }, @@ -260,22 +224,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_async.py" }, { "canonical": true, @@ -284,31 +248,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.commit_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CommitConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "CreateMigrationJob" + "shortName": "CommitConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "migration_job", - "type": "google.cloud.clouddms_v1.types.MigrationJob" - }, - { - "name": "migration_job_id", - "type": "str" + "type": "google.cloud.clouddms_v1.types.CommitConversionWorkspaceRequest" }, { "name": "retry", @@ -324,21 +276,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_migration_job" + "shortName": "commit_conversion_workspace" }, - "description": "Sample for CreateMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py", + "description": "Sample for CommitConversionWorkspace", + "file": 
"datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_CommitConversionWorkspace_sync", "segments": [ { - "end": 64, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 64, + "end": 55, "start": 27, "type": "SHORT" }, @@ -348,22 +300,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 54, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 61, - "start": 55, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 65, - "start": 62, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_commit_conversion_workspace_sync.py" }, { "canonical": true, @@ -373,23 +325,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.convert_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteConnectionProfile" + "shortName": "ConvertConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" }, { "name": "retry", @@ -405,21 +353,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_connection_profile" + "shortName": "convert_conversion_workspace" }, - "description": "Sample for DeleteConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py", + "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_async", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -429,22 +377,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py" + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_async.py" }, { "canonical": true, @@ -453,23 +401,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.delete_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.convert_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ConvertConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteConnectionProfile" + "shortName": "ConvertConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ConvertConversionWorkspaceRequest" }, { "name": "retry", @@ -485,21 +429,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_connection_profile" + "shortName": "convert_conversion_workspace" }, - "description": "Sample for DeleteConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py", + "description": "Sample for ConvertConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ConvertConversionWorkspace_sync", "segments": [ { - "end": 55, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 54, "start": 27, "type": "SHORT" }, @@ -509,22 +453,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py" + "title": "datamigration_v1_generated_data_migration_service_convert_conversion_workspace_sync.py" }, { "canonical": true, @@ -534,22 +478,30 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_connection_profile", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteMigrationJob" + "shortName": "CreateConnectionProfile" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", "type": "str" }, { @@ -566,21 +518,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_migration_job" + "shortName": "create_connection_profile" }, - "description": "Sample for 
DeleteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py", + "description": "Sample for CreateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_async", "segments": [ { - "end": 55, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 63, "start": 27, "type": "SHORT" }, @@ -590,22 +542,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 53, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 46, + "end": 60, + "start": 54, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 64, + "start": 61, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_async.py" }, { "canonical": true, @@ -614,22 +566,30 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_connection_profile", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConnectionProfile", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "DeleteMigrationJob" + "shortName": "CreateConnectionProfile" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.CreateConnectionProfileRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "connection_profile", + "type": "google.cloud.clouddms_v1.types.ConnectionProfile" + }, + { + "name": "connection_profile_id", "type": "str" }, { @@ -646,13 +606,2605 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_migration_job" + "shortName": "create_connection_profile" }, - "description": "Sample for DeleteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py", + "description": "Sample for CreateConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConnectionProfile_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + 
"fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateConversionWorkspaceRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "conversion_workspace_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_conversion_workspace" + }, + "description": "Sample for CreateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateConversionWorkspace_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_async", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreateMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreateMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreateMigrationJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "migration_job", + "type": "google.cloud.clouddms_v1.types.MigrationJob" + }, + { + "name": "migration_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_migration_job" + }, + "description": "Sample for CreateMigrationJob", + "file": 
"datamigration_v1_generated_data_migration_service_create_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreateMigrationJob_sync", + "segments": [ + { + "end": 64, + "start": 27, + "type": "FULL" + }, + { + "end": 64, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 54, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 61, + "start": 55, + "type": "REQUEST_EXECUTION" + }, + { + "end": 65, + "start": 62, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + { + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.create_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.CreatePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "CreatePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.CreatePrivateConnectionRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "private_connection", + "type": "google.cloud.clouddms_v1.types.PrivateConnection" + }, + 
{ + "name": "private_connection_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_private_connection" + }, + "description": "Sample for CreatePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_CreatePrivateConnection_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_create_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConnectionProfile" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_connection_profile" + }, + "description": "Sample for DeleteConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConnectionProfile_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteConversionWorkspace", + "service": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_conversion_workspace" + }, + "description": "Sample for DeleteConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteConversionWorkspace_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.delete_migration_job", + "method": { + "fullName": 
"google.cloud.clouddms.v1.DataMigrationService.DeleteMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeleteMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeleteMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_migration_job" + }, + "description": "Sample for DeleteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeleteMigrationJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.delete_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": 
"google.cloud.clouddms_v1.DataMigrationServiceClient.delete_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DeletePrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DeletePrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DeletePrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_private_connection" + }, + "description": "Sample for DeletePrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DeletePrivateConnection_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_delete_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_conversion_workspace_revisions", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeConversionWorkspaceRevisions", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeConversionWorkspaceRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.DescribeConversionWorkspaceRevisionsResponse", + "shortName": "describe_conversion_workspace_revisions" + }, + "description": "Sample for DescribeConversionWorkspaceRevisions", + "file": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeConversionWorkspaceRevisions_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_conversion_workspace_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesAsyncPager", + "shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.describe_database_entities", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.DescribeDatabaseEntities", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "DescribeDatabaseEntities" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.DescribeDatabaseEntitiesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.DescribeDatabaseEntitiesPager", + "shortName": "describe_database_entities" + }, + "description": "Sample for DescribeDatabaseEntities", + "file": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_DescribeDatabaseEntities_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_describe_database_entities_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsAsyncPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, 
+ { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.fetch_static_ips", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.FetchStaticIps", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "FetchStaticIps" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.FetchStaticIpsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.FetchStaticIpsPager", + "shortName": "fetch_static_ips" + }, + "description": "Sample for FetchStaticIps", + "file": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_FetchStaticIps_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_fetch_static_ips_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_ssh_script", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GenerateSshScript" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.SshScript", + "shortName": "generate_ssh_script" + }, + "description": "Sample for GenerateSshScript", + "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, + "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + 
}, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_connection_profile", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConnectionProfile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", + "shortName": "get_connection_profile" + }, + "description": "Sample for GetConnectionProfile", + "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_async", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetConversionWorkspaceRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.ConversionWorkspace", + "shortName": "get_conversion_workspace" + }, + "description": "Sample for GetConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetConversionWorkspace_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_conversion_workspace_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"datamigration_v1_generated_DataMigrationService_GetMigrationJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_migration_job", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetMigrationJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.MigrationJob", + "shortName": "get_migration_job" + }, + "description": "Sample for GetMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + 
"regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_private_connection", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetPrivateConnection", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "GetPrivateConnection" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.GetPrivateConnectionRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.types.PrivateConnection", + "shortName": "get_private_connection" + }, + "description": "Sample for GetPrivateConnection", + "file": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_GetPrivateConnection_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_get_private_connection_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.import_mapping_rules", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ImportMappingRules", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ImportMappingRules" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ImportMappingRulesRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "import_mapping_rules" + }, + "description": "Sample for ImportMappingRules", + "file": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ImportMappingRules_sync", "segments": [ { "end": 55, @@ -660,7 +3212,249 @@ "type": "FULL" }, { - "end": 55, + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_import_mapping_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": 
"datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_connection_profiles", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConnectionProfiles" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager", + "shortName": "list_connection_profiles" + }, + "description": "Sample for ListConnectionProfiles", + "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesAsyncPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, "start": 27, "type": "SHORT" }, @@ -674,18 +3468,98 @@ "start": 41, "type": "REQUEST_INITIALIZATION" }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_conversion_workspaces", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConversionWorkspaces", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "ListConversionWorkspaces" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.ListConversionWorkspacesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConversionWorkspacesPager", + "shortName": "list_conversion_workspaces" + }, + "description": "Sample for ListConversionWorkspaces", + "file": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListConversionWorkspaces_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, { "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_delete_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_list_conversion_workspaces_sync.py" }, { "canonical": true, @@ -695,19 +3569,23 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.generate_ssh_script", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_migration_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GenerateSshScript" + "shortName": 
"ListMigrationJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -722,22 +3600,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.SshScript", - "shortName": "generate_ssh_script" + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager", + "shortName": "list_migration_jobs" }, - "description": "Sample for GenerateSshScript", - "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py", + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -747,22 +3625,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_async.py" + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py" }, { "canonical": true, @@ -771,19 +3649,23 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.generate_ssh_script", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_migration_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GenerateSshScript", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GenerateSshScript" + "shortName": "ListMigrationJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GenerateSshScriptRequest" + "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" + }, + { + "name": "parent", + "type": "str" }, { "name": "retry", @@ -798,22 +3680,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.SshScript", - "shortName": "generate_ssh_script" + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager", + "shortName": "list_migration_jobs" }, - "description": "Sample for GenerateSshScript", - "file": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py", + "description": "Sample for ListMigrationJobs", + "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GenerateSshScript_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync", "segments": [ { - "end": 55, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 
55, + "end": 52, "start": 27, "type": "SHORT" }, @@ -823,22 +3705,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 49, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_generate_ssh_script_sync.py" + "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py" }, { "canonical": true, @@ -848,22 +3730,22 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_private_connections", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetConnectionProfile" + "shortName": "ListPrivateConnections" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + "type": "google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -879,22 +3761,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", - "shortName": "get_connection_profile" + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsAsyncPager", + "shortName": "list_private_connections" }, - "description": "Sample for GetConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py", + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -914,12 +3796,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_async.py" + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_async.py" }, { "canonical": true, @@ -928,22 +3810,22 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_connection_profile", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_private_connections", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetConnectionProfile", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListPrivateConnections", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetConnectionProfile" + "shortName": 
"ListPrivateConnections" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetConnectionProfileRequest" + "type": "google.cloud.clouddms_v1.types.ListPrivateConnectionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -959,22 +3841,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.ConnectionProfile", - "shortName": "get_connection_profile" + "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListPrivateConnectionsPager", + "shortName": "list_private_connections" }, - "description": "Sample for GetConnectionProfile", - "file": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py", + "description": "Sample for ListPrivateConnections", + "file": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetConnectionProfile_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ListPrivateConnections_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -994,12 +3876,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_get_connection_profile_sync.py" + "title": "datamigration_v1_generated_data_migration_service_list_private_connections_sync.py" }, { "canonical": true, @@ -1009,23 +3891,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.get_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.promote_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetMigrationJob" + "shortName": "PromoteMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" }, { "name": "retry", @@ -1040,22 +3918,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.MigrationJob", - "shortName": "get_migration_job" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "promote_migration_job" }, - "description": "Sample for GetMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py", + "description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1065,22 +3943,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, 
"type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_get_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py" }, { "canonical": true, @@ -1089,23 +3967,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.get_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.promote_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.GetMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "GetMigrationJob" + "shortName": "PromoteMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.GetMigrationJobRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" }, { "name": "retry", @@ -1120,22 +3994,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.types.MigrationJob", - "shortName": "get_migration_job" + "resultType": "google.api_core.operation.Operation", + "shortName": "promote_migration_job" }, - "description": "Sample for GetMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py", + "description": "Sample for PromoteMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_GetMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync", "segments": [ { - "end": 51, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1145,22 +4019,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_get_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py" }, { "canonical": true, @@ -1170,23 +4044,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_connection_profiles", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.restart_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListConnectionProfiles" + "shortName": "RestartMigrationJob" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" }, { "name": "retry", @@ -1201,22 +4071,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesAsyncPager", - "shortName": "list_connection_profiles" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "restart_migration_job" }, - "description": "Sample for ListConnectionProfiles", - "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py", + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1226,22 +4096,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_async.py" + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py" }, { "canonical": true, @@ -1250,23 +4120,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_connection_profiles", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.restart_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListConnectionProfiles", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListConnectionProfiles" + "shortName": "RestartMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ListConnectionProfilesRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" }, { "name": "retry", @@ -1281,22 +4147,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListConnectionProfilesPager", - "shortName": "list_connection_profiles" + "resultType": "google.api_core.operation.Operation", + "shortName": "restart_migration_job" }, - "description": "Sample for ListConnectionProfiles", - "file": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py", + "description": "Sample for RestartMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListConnectionProfiles_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync", "segments": [ { - "end": 52, + "end": 54, 
"start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1306,22 +4172,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_connection_profiles_sync.py" + "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py" }, { "canonical": true, @@ -1331,23 +4197,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.list_migration_jobs", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.resume_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListMigrationJobs" + "shortName": "ResumeMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" }, { "name": "retry", @@ -1362,22 +4224,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsAsyncPager", - "shortName": "list_migration_jobs" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "resume_migration_job" }, - "description": "Sample for ListMigrationJobs", - "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py", + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1387,22 +4249,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_async.py" + "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py" }, { "canonical": true, @@ -1411,23 +4273,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.list_migration_jobs", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.resume_migration_job", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ListMigrationJobs", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", "service": { 
"fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ListMigrationJobs" + "shortName": "ResumeMigrationJob" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ListMigrationJobsRequest" - }, - { - "name": "parent", - "type": "str" + "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" }, { "name": "retry", @@ -1442,22 +4300,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.cloud.clouddms_v1.services.data_migration_service.pagers.ListMigrationJobsPager", - "shortName": "list_migration_jobs" + "resultType": "google.api_core.operation.Operation", + "shortName": "resume_migration_job" }, - "description": "Sample for ListMigrationJobs", - "file": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py", + "description": "Sample for ResumeMigrationJob", + "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ListMigrationJobs_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -1467,22 +4325,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 44, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 51, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_list_migration_jobs_sync.py" + "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py" }, { "canonical": true, @@ -1492,19 +4350,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.promote_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.rollback_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "PromoteMigrationJob" + "shortName": "RollbackConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" }, { "name": "retry", @@ -1520,21 +4378,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "promote_migration_job" + "shortName": "rollback_conversion_workspace" }, - "description": "Sample for PromoteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py", + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_async", 
"segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1544,22 +4402,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_async.py" }, { "canonical": true, @@ -1568,19 +4426,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.promote_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.rollback_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.PromoteMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.RollbackConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "PromoteMigrationJob" + "shortName": "RollbackConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.PromoteMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.RollbackConversionWorkspaceRequest" }, { "name": "retry", @@ -1596,21 +4454,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "promote_migration_job" + "shortName": "rollback_conversion_workspace" }, - "description": "Sample for PromoteMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py", + "description": "Sample for RollbackConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_PromoteMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_RollbackConversionWorkspace_sync", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1620,22 +4478,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_promote_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_rollback_conversion_workspace_sync.py" }, { "canonical": true, @@ -1645,19 +4503,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.restart_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.search_background_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", "service": { "fullName": 
"google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "RestartMigrationJob" + "shortName": "SearchBackgroundJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" }, { "name": "retry", @@ -1672,22 +4530,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "restart_migration_job" + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" }, - "description": "Sample for RestartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py", + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_async", "segments": [ { - "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1697,22 +4555,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_async.py" }, { "canonical": true, @@ -1721,19 +4579,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.restart_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.search_background_jobs", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.RestartMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SearchBackgroundJobs", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "RestartMigrationJob" + "shortName": "SearchBackgroundJobs" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.RestartMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.SearchBackgroundJobsRequest" }, { "name": "retry", @@ -1748,22 +4606,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "restart_migration_job" + "resultType": "google.cloud.clouddms_v1.types.SearchBackgroundJobsResponse", + "shortName": "search_background_jobs" }, - "description": "Sample for RestartMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py", + "description": "Sample for SearchBackgroundJobs", + "file": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_RestartMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_SearchBackgroundJobs_sync", "segments": [ { 
- "end": 54, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1773,22 +4631,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_restart_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_search_background_jobs_sync.py" }, { "canonical": true, @@ -1798,19 +4656,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", "shortName": "DataMigrationServiceAsyncClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.resume_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.seed_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - "shortName": "ResumeMigrationJob" + "shortName": "SeedConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" }, { "name": "retry", @@ -1826,21 +4684,21 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "resume_migration_job" + "shortName": "seed_conversion_workspace" }, - "description": "Sample for ResumeMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py", + "description": "Sample for SeedConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_async", + "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_async", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1850,22 +4708,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_async.py" + "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_async.py" }, { "canonical": true, @@ -1874,19 +4732,19 @@ "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", "shortName": "DataMigrationServiceClient" }, - "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.resume_migration_job", + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.seed_conversion_workspace", "method": { - "fullName": "google.cloud.clouddms.v1.DataMigrationService.ResumeMigrationJob", + "fullName": "google.cloud.clouddms.v1.DataMigrationService.SeedConversionWorkspace", "service": { "fullName": "google.cloud.clouddms.v1.DataMigrationService", "shortName": "DataMigrationService" }, - 
"shortName": "ResumeMigrationJob" + "shortName": "SeedConversionWorkspace" }, "parameters": [ { "name": "request", - "type": "google.cloud.clouddms_v1.types.ResumeMigrationJobRequest" + "type": "google.cloud.clouddms_v1.types.SeedConversionWorkspaceRequest" }, { "name": "retry", @@ -1902,21 +4760,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "resume_migration_job" + "shortName": "seed_conversion_workspace" }, - "description": "Sample for ResumeMigrationJob", - "file": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py", + "description": "Sample for SeedConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "datamigration_v1_generated_DataMigrationService_ResumeMigrationJob_sync", + "regionTag": "datamigration_v1_generated_DataMigrationService_SeedConversionWorkspace_sync", "segments": [ { - "end": 54, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1926,22 +4784,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 51, - "start": 45, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 52, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "datamigration_v1_generated_data_migration_service_resume_migration_job_sync.py" + "title": "datamigration_v1_generated_data_migration_service_seed_conversion_workspace_sync.py" }, { "canonical": true, @@ -2418,6 +5276,175 @@ ], "title": "datamigration_v1_generated_data_migration_service_update_connection_profile_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient", + "shortName": "DataMigrationServiceAsyncClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceAsyncClient.update_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_async", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, 
+ "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient", + "shortName": "DataMigrationServiceClient" + }, + "fullName": "google.cloud.clouddms_v1.DataMigrationServiceClient.update_conversion_workspace", + "method": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService.UpdateConversionWorkspace", + "service": { + "fullName": "google.cloud.clouddms.v1.DataMigrationService", + "shortName": "DataMigrationService" + }, + "shortName": "UpdateConversionWorkspace" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.clouddms_v1.types.UpdateConversionWorkspaceRequest" + }, + { + "name": "conversion_workspace", + "type": "google.cloud.clouddms_v1.types.ConversionWorkspace" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_conversion_workspace" + }, + "description": "Sample for UpdateConversionWorkspace", + "file": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datamigration_v1_generated_DataMigrationService_UpdateConversionWorkspace_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datamigration_v1_generated_data_migration_service_update_conversion_workspace_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/scripts/fixup_clouddms_v1_keywords.py b/scripts/fixup_clouddms_v1_keywords.py index 10c3dee..ad6ad15 100644 --- a/scripts/fixup_clouddms_v1_keywords.py +++ b/scripts/fixup_clouddms_v1_keywords.py @@ -39,21 +39,40 @@ def partition( class clouddmsCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', ), + 'apply_conversion_workspace': ('name', 'filter', 'connection_profile', ), + 'commit_conversion_workspace': ('name', 'commit_name', ), + 'convert_conversion_workspace': ('name', 'auto_commit', 'filter', ), + 'create_connection_profile': ('parent', 'connection_profile_id', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), + 'create_conversion_workspace': ('parent', 'conversion_workspace_id', 'conversion_workspace', 'request_id', ), 'create_migration_job': ('parent', 'migration_job_id', 'migration_job', 'request_id', ), + 'create_private_connection': ('parent', 'private_connection_id', 'private_connection', 'request_id', 'skip_validation', ), 'delete_connection_profile': ('name', 'request_id', 'force', ), + 'delete_conversion_workspace': ('name', 'request_id', ), 
'delete_migration_job': ('name', 'request_id', 'force', ), + 'delete_private_connection': ('name', 'request_id', ), + 'describe_conversion_workspace_revisions': ('conversion_workspace', 'commit_id', ), + 'describe_database_entities': ('conversion_workspace', 'page_size', 'page_token', 'tree', 'uncommitted', 'commit_id', 'filter', ), + 'fetch_static_ips': ('name', 'page_size', 'page_token', ), 'generate_ssh_script': ('vm', 'migration_job', 'vm_creation_config', 'vm_selection_config', 'vm_port', ), 'get_connection_profile': ('name', ), + 'get_conversion_workspace': ('name', ), 'get_migration_job': ('name', ), + 'get_private_connection': ('name', ), + 'import_mapping_rules': ('parent', 'rules_format', 'rules_files', 'auto_commit', ), 'list_connection_profiles': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_conversion_workspaces': ('parent', 'page_size', 'page_token', 'filter', ), 'list_migration_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_private_connections': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'promote_migration_job': ('name', ), 'restart_migration_job': ('name', ), 'resume_migration_job': ('name', ), + 'rollback_conversion_workspace': ('name', ), + 'search_background_jobs': ('conversion_workspace', 'return_most_recent_per_job_type', 'max_size', 'completed_until_time', ), + 'seed_conversion_workspace': ('name', 'auto_commit', 'source_connection_profile', 'destination_connection_profile', ), 'start_migration_job': ('name', ), 'stop_migration_job': ('name', ), - 'update_connection_profile': ('update_mask', 'connection_profile', 'request_id', ), + 'update_connection_profile': ('update_mask', 'connection_profile', 'request_id', 'validate_only', 'skip_validation', ), + 'update_conversion_workspace': ('update_mask', 'conversion_workspace', 'request_id', ), 'update_migration_job': ('update_mask', 'migration_job', 'request_id', ), 'verify_migration_job': ('name', ), } diff --git a/setup.py b/setup.py index 8ce5121..dc4db60 100644 --- a/setup.py +++ b/setup.py @@ -40,6 +40,7 @@ "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, <1.0.0dev", ] url = "https://github.com/googleapis/python-dms" diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.10.txt +++ b/testing/constraints-3.10.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.11.txt +++ b/testing/constraints-3.11.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.12.txt +++ b/testing/constraints-3.12.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index 6c44adf..2beecf9 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -7,3 +7,4 @@ google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.8.txt +++ 
b/testing/constraints-3.8.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt index ed7f9ae..ad3f0fa 100644 --- a/testing/constraints-3.9.txt +++ b/testing/constraints-3.9.txt @@ -4,3 +4,4 @@ google-api-core proto-plus protobuf +grpc-google-iam-v1 diff --git a/tests/unit/gapic/clouddms_v1/test_data_migration_service.py b/tests/unit/gapic/clouddms_v1/test_data_migration_service.py index 8815fd3..8d4ed14 100644 --- a/tests/unit/gapic/clouddms_v1/test_data_migration_service.py +++ b/tests/unit/gapic/clouddms_v1/test_data_migration_service.py @@ -39,6 +39,10 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore @@ -60,7 +64,11 @@ pagers, transports, ) -from google.cloud.clouddms_v1.types import clouddms, clouddms_resources +from google.cloud.clouddms_v1.types import ( + clouddms, + clouddms_resources, + conversionworkspace_resources, +) def client_cert_source_callback(): @@ -1197,6 +1205,8 @@ def test_get_migration_job(request_type, transport: str = "grpc"): dump_path="dump_path_value", source="source_value", destination="destination_value", + filter="filter_value", + cmek_key_name="cmek_key_name_value", reverse_ssh_connectivity=clouddms_resources.ReverseSshConnectivity( vm_ip="vm_ip_value" ), @@ -1218,6 +1228,8 @@ def test_get_migration_job(request_type, transport: str = "grpc"): assert response.dump_path == "dump_path_value" assert response.source == "source_value" assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.cmek_key_name == "cmek_key_name_value" def test_get_migration_job_empty_call(): @@ -1266,6 +1278,8 @@ async def test_get_migration_job_async( dump_path="dump_path_value", source="source_value", destination="destination_value", + filter="filter_value", + cmek_key_name="cmek_key_name_value", ) ) response = await client.get_migration_job(request) @@ -1285,6 +1299,8 @@ async def test_get_migration_job_async( assert response.dump_path == "dump_path_value" assert response.source == "source_value" assert response.destination == "destination_value" + assert response.filter == "filter_value" + assert response.cmek_key_name == "cmek_key_name_value" @pytest.mark.asyncio @@ -4735,742 +4751,6926 @@ async def test_delete_connection_profile_flattened_error_async(): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + clouddms.CreatePrivateConnectionRequest, + dict, + ], +) +def test_create_private_connection(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # It is an error to provide an api_key and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options=options, - transport=transport, - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_private_connection(request) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() - # It is an error to provide scopes and a transport instance. - transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DataMigrationServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataMigrationServiceGrpcTransport( +def test_create_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - client = DataMigrationServiceClient(transport=transport) - assert client.transport is transport + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + client.create_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DataMigrationServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.DataMigrationServiceGrpcAsyncIOTransport( +@pytest.mark.asyncio +async def test_create_private_connection_async( + transport: str = "grpc_asyncio", + request_type=clouddms.CreatePrivateConnectionRequest, +): + client = DataMigrationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_private_connection(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreatePrivateConnectionRequest() -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - ], -) -def test_transport_kind(transport_name): - transport = DataMigrationServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +@pytest.mark.asyncio +async def test_create_private_connection_async_from_dict(): + await test_create_private_connection_async(request_type=dict) + + +def test_create_private_connection_field_headers(): client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), ) - assert isinstance( - client.transport, - transports.DataMigrationServiceGrpcTransport, - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreatePrivateConnectionRequest() -def test_data_migration_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataMigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_private_connection(request) -def test_data_migration_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DataMigrationServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "list_migration_jobs", - "get_migration_job", - "create_migration_job", - "update_migration_job", - "delete_migration_job", - "start_migration_job", - "stop_migration_job", - "resume_migration_job", - "promote_migration_job", - "verify_migration_job", - "restart_migration_job", - "generate_ssh_script", - "list_connection_profiles", - "get_connection_profile", - "create_connection_profile", - "update_connection_profile", - "delete_connection_profile", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] - with pytest.raises(NotImplementedError): - transport.close() - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client +@pytest.mark.asyncio +async def test_create_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreatePrivateConnectionRequest() + request.parent = "parent_value" -def test_data_migration_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataMigrationServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", + type(client.transport.create_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) + await client.create_private_connection(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_data_migration_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DataMigrationServiceTransport() - adc.assert_called_once() - - -def test_data_migration_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DataMigrationServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) +def test_create_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
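+ # The flattened parent, private_connection and private_connection_id kwargs
+ # mirror the corresponding fields of CreatePrivateConnectionRequest.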
+ client.create_private_connection( + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = "private_connection_id_value" + assert arg == mock_val -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.DataMigrationServiceGrpcTransport, grpc_helpers), - (transports.DataMigrationServiceGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_data_migration_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) +def test_create_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - create_channel.assert_called_with( - "datamigration.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="datamigration.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_grpc_transport_client_cert_source_for_mtls( - transport_class, -): - cred = ga_credentials.AnonymousCredentials() +@pytest.mark.asyncio +async def test_create_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_private_connection( + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", ) - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. - with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].private_connection + mock_val = clouddms_resources.PrivateConnection(name="name_value") + assert arg == mock_val + arg = args[0].private_connection_id + mock_val = "private_connection_id_value" + assert arg == mock_val -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - ], -) -def test_data_migration_service_host_no_port(transport_name): - client = DataMigrationServiceClient( +@pytest.mark.asyncio +async def test_create_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="datamigration.googleapis.com" - ), - transport=transport_name, ) - assert client.transport._host == ("datamigration.googleapis.com:443") + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_private_connection( + clouddms.CreatePrivateConnectionRequest(), + parent="parent_value", + private_connection=clouddms_resources.PrivateConnection(name="name_value"), + private_connection_id="private_connection_id_value", + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", - "grpc_asyncio", + clouddms.GetPrivateConnectionRequest, + dict, ], ) -def test_data_migration_service_host_with_port(transport_name): +def test_get_private_connection(request_type, transport: str = "grpc"): client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="datamigration.googleapis.com:8000" - ), - transport=transport_name, + transport=transport, ) - assert client.transport._host == ("datamigration.googleapis.com:8000") + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() -def test_data_migration_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection( + name="name_value", + display_name="display_name_value", + state=clouddms_resources.PrivateConnection.State.CREATING, + vpc_peering_config=clouddms_resources.VpcPeeringConfig( + vpc_name="vpc_name_value" + ), + ) + response = client.get_private_connection(request) - # Check that channel is used if provided. - transport = transports.DataMigrationServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() + # Establish that the response is the type that we expect. 
+ assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == clouddms_resources.PrivateConnection.State.CREATING -def test_data_migration_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - # Check that channel is used if provided. - transport = transports.DataMigrationServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, +def test_get_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, - ], -) -def test_data_migration_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + client.get_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() +@pytest.mark.asyncio +async def test_get_private_connection_async( + transport: str = "grpc_asyncio", request_type=clouddms.GetPrivateConnectionRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
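+ # FakeUnaryUnaryCall wraps the response so the mocked stub can be awaited
+ # like a real async gRPC call.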
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms_resources.PrivateConnection( + name="name_value", + display_name="display_name_value", + state=clouddms_resources.PrivateConnection.State.CREATING, ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred + ) + response = await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetPrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms_resources.PrivateConnection) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.state == clouddms_resources.PrivateConnection.State.CREATING + + +@pytest.mark.asyncio +async def test_get_private_connection_async_from_dict(): + await test_get_private_connection_async(request_type=dict) + + +def test_get_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + call.return_value = clouddms_resources.PrivateConnection() + client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetPrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms_resources.PrivateConnection() + ) + await client.get_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
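+ # An empty response message is enough here; only the outgoing request
+ # built from the flattened argument is inspected.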
+ call.return_value = clouddms_resources.PrivateConnection() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms_resources.PrivateConnection() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms_resources.PrivateConnection() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_private_connection( + clouddms.GetPrivateConnectionRequest(), + name="name_value", + ) -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
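Aside for reviewers: the ("x-goog-request-params", ...) tuples asserted in the
field-header tests above are built by the routing-header helper in
google.api_core, which the generated client invokes before dispatching each
RPC. A minimal sketch of that mechanism (the field/value pair is illustrative,
not taken from this patch):

    from google.api_core.gapic_v1 import routing_header

    # Serialize the routing fields into the metadata tuple that is attached
    # to the outgoing gRPC call, matching the tuple asserted in kw["metadata"].
    metadata = routing_header.to_grpc_metadata([("name", "name_value")])
    assert metadata == ("x-goog-request-params", "name=name_value")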
@pytest.mark.parametrize( - "transport_class", + "request_type", [ - transports.DataMigrationServiceGrpcTransport, - transports.DataMigrationServiceGrpcAsyncIOTransport, + clouddms.ListPrivateConnectionsRequest, + dict, ], ) -def test_data_migration_service_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() +def test_list_private_connections(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_private_connections_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + client.list_private_connections() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + +@pytest.mark.asyncio +async def test_list_private_connections_async( + transport: str = "grpc_asyncio", request_type=clouddms.ListPrivateConnectionsRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListPrivateConnectionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListPrivateConnectionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPrivateConnectionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_private_connections_async_from_dict(): + await test_list_private_connections_async(request_type=dict) + + +def test_list_private_connections_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + call.return_value = clouddms.ListPrivateConnectionsResponse() + client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_private_connections_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListPrivateConnectionsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListPrivateConnectionsResponse() + ) + await client.list_private_connections(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_private_connections_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_private_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_private_connections_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListPrivateConnectionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListPrivateConnectionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_private_connections( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_private_connections_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_private_connections( + clouddms.ListPrivateConnectionsRequest(), + parent="parent_value", + ) + + +def test_list_private_connections_pager(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Set the response to a series of pages. 
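+ # Four responses (the last with no next_page_token) followed by a
+ # RuntimeError, which would surface if the pager ever fetched past the
+ # final page.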
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_private_connections(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, clouddms_resources.PrivateConnection) for i in results) + + +def test_list_private_connections_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = list(client.list_private_connections(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
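+ # With new_callable=mock.AsyncMock, each side_effect item is returned
+ # from an awaited call, mimicking the async transport.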
+ call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_private_connections( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, clouddms_resources.PrivateConnection) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_private_connections_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_private_connections), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + next_page_token="abc", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[], + next_page_token="def", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + ], + next_page_token="ghi", + ), + clouddms.ListPrivateConnectionsResponse( + private_connections=[ + clouddms_resources.PrivateConnection(), + clouddms_resources.PrivateConnection(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_private_connections(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DeletePrivateConnectionRequest, + dict, + ], +) +def test_delete_private_connection(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. 
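+ # Deleting a private connection is a long-running operation, so the
+ # client returns an operation future rather than the deleted resource.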
+ assert isinstance(response, future.Future) + + +def test_delete_private_connection_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + client.delete_private_connection() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + +@pytest.mark.asyncio +async def test_delete_private_connection_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DeletePrivateConnectionRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeletePrivateConnectionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_private_connection_async_from_dict(): + await test_delete_private_connection_async(request_type=dict) + + +def test_delete_private_connection_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeletePrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_private_connection_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeletePrivateConnectionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_private_connection(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_private_connection_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_private_connection_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_private_connection), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_private_connection( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_private_connection_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
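+ # Supplying request= together with flattened arguments is ambiguous,
+ # so the client surface rejects the combination outright.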
+ with pytest.raises(ValueError): + await client.delete_private_connection( + clouddms.DeletePrivateConnectionRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.GetConversionWorkspaceRequest, + dict, + ], +) +def test_get_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace( + name="name_value", + has_uncommitted_changes=True, + latest_commit_id="latest_commit_id_value", + display_name="display_name_value", + ) + response = client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == "name_value" + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == "latest_commit_id_value" + assert response.display_name == "display_name_value" + + +def test_get_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + client.get_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async( + transport: str = "grpc_asyncio", request_type=clouddms.GetConversionWorkspaceRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversionworkspace_resources.ConversionWorkspace( + name="name_value", + has_uncommitted_changes=True, + latest_commit_id="latest_commit_id_value", + display_name="display_name_value", + ) + ) + response = await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.GetConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, conversionworkspace_resources.ConversionWorkspace) + assert response.name == "name_value" + assert response.has_uncommitted_changes is True + assert response.latest_commit_id == "latest_commit_id_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_async_from_dict(): + await test_get_conversion_workspace_async(request_type=dict) + + +def test_get_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + call.return_value = conversionworkspace_resources.ConversionWorkspace() + client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.GetConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversionworkspace_resources.ConversionWorkspace() + ) + await client.get_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_conversion_workspace( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_conversion_workspace_flattened_error(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_conversion_workspace( + clouddms.GetConversionWorkspaceRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_flattened_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = conversionworkspace_resources.ConversionWorkspace() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + conversionworkspace_resources.ConversionWorkspace() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_conversion_workspace( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_conversion_workspace( + clouddms.GetConversionWorkspaceRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ListConversionWorkspacesRequest, + dict, + ], +) +def test_list_conversion_workspaces(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.ListConversionWorkspacesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConversionWorkspacesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_conversion_workspaces_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + client.list_conversion_workspaces() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async( + transport: str = "grpc_asyncio", + request_type=clouddms.ListConversionWorkspacesRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.ListConversionWorkspacesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ListConversionWorkspacesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListConversionWorkspacesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_async_from_dict(): + await test_list_conversion_workspaces_async(request_type=dict) + + +def test_list_conversion_workspaces_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_conversion_workspaces), "__call__" + ) as call: + call.return_value = clouddms.ListConversionWorkspacesResponse() + client.list_conversion_workspaces(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_conversion_workspaces_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ListConversionWorkspacesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            clouddms.ListConversionWorkspacesResponse()
+        )
+        await client.list_conversion_workspaces(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_list_conversion_workspaces_flattened():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = clouddms.ListConversionWorkspacesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_conversion_workspaces(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+def test_list_conversion_workspaces_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_conversion_workspaces(
+            clouddms.ListConversionWorkspacesRequest(),
+            parent="parent_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            clouddms.ListConversionWorkspacesResponse()
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_conversion_workspaces(
+            parent="parent_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_flattened_error_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
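+    # The error is raised client-side, before any RPC is attempted, which is
+    # why no transport mock is needed in this test.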
+    with pytest.raises(ValueError):
+        await client.list_conversion_workspaces(
+            clouddms.ListConversionWorkspacesRequest(),
+            parent="parent_value",
+        )
+
+
+def test_list_conversion_workspaces_pager(transport_name: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="abc",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[],
+                next_page_token="def",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="ghi",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+        )
+        pager = client.list_conversion_workspaces(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(
+            isinstance(i, conversionworkspace_resources.ConversionWorkspace)
+            for i in results
+        )
+
+
+def test_list_conversion_workspaces_pages(transport_name: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces), "__call__"
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="abc",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[],
+                next_page_token="def",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="ghi",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_conversion_workspaces(request={}).pages)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_async_pager():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
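+        # The pages chain via next_page_token ("abc" -> "def" -> "ghi" -> ""),
+        # and an empty token marks the final page; the trailing RuntimeError
+        # would surface if the pager ever requested a page past the end.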
+        call.side_effect = (
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="abc",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[],
+                next_page_token="def",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="ghi",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_conversion_workspaces(
+            request={},
+        )
+        assert async_pager.next_page_token == "abc"
+        responses = []
+        async for response in async_pager:  # pragma: no branch
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(
+            isinstance(i, conversionworkspace_resources.ConversionWorkspace)
+            for i in responses
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_conversion_workspaces_async_pages():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.list_conversion_workspaces),
+        "__call__",
+        new_callable=mock.AsyncMock,
+    ) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="abc",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[],
+                next_page_token="def",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+                next_page_token="ghi",
+            ),
+            clouddms.ListConversionWorkspacesResponse(
+                conversion_workspaces=[
+                    conversionworkspace_resources.ConversionWorkspace(),
+                    conversionworkspace_resources.ConversionWorkspace(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (
+            await client.list_conversion_workspaces(request={})
+        ).pages:  # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        clouddms.CreateConversionWorkspaceRequest,
+        dict,
+    ],
+)
+def test_create_conversion_workspace(request_type, transport: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.create_conversion_workspace(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + client.create_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.CreateConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CreateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_async_from_dict(): + await test_create_conversion_workspace_async(request_type=dict) + + +def test_create_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CreateConversionWorkspaceRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+    request = clouddms.CreateConversionWorkspaceRequest()
+
+    request.parent = "parent_value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_conversion_workspace), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+        await client.create_conversion_workspace(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "parent=parent_value",
+    ) in kw["metadata"]
+
+
+def test_create_conversion_workspace_flattened():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_conversion_workspace(
+            parent="parent_value",
+            conversion_workspace=conversionworkspace_resources.ConversionWorkspace(
+                name="name_value"
+            ),
+            conversion_workspace_id="conversion_workspace_id_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].parent
+        mock_val = "parent_value"
+        assert arg == mock_val
+        arg = args[0].conversion_workspace
+        mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value")
+        assert arg == mock_val
+        arg = args[0].conversion_workspace_id
+        mock_val = "conversion_workspace_id_value"
+        assert arg == mock_val
+
+
+def test_create_conversion_workspace_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_conversion_workspace(
+            clouddms.CreateConversionWorkspaceRequest(),
+            parent="parent_value",
+            conversion_workspace=conversionworkspace_resources.ConversionWorkspace(
+                name="name_value"
+            ),
+            conversion_workspace_id="conversion_workspace_id_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_conversion_workspace_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.create_conversion_workspace( + parent="parent_value", + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + conversion_workspace_id="conversion_workspace_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value") + assert arg == mock_val + arg = args[0].conversion_workspace_id + mock_val = "conversion_workspace_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_conversion_workspace( + clouddms.CreateConversionWorkspaceRequest(), + parent="parent_value", + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + conversion_workspace_id="conversion_workspace_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.UpdateConversionWorkspaceRequest, + dict, + ], +) +def test_update_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + client.update_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.UpdateConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.UpdateConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_async_from_dict(): + await test_update_conversion_workspace_async(request_type=dict) + + +def test_update_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.UpdateConversionWorkspaceRequest() + + request.conversion_workspace.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "conversion_workspace.name=name_value",
+    ) in kw["metadata"]
+
+
+def test_update_conversion_workspace_flattened():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/op")
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_conversion_workspace(
+            conversion_workspace=conversionworkspace_resources.ConversionWorkspace(
+                name="name_value"
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].conversion_workspace
+        mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value")
+        assert arg == mock_val
+        arg = args[0].update_mask
+        mock_val = field_mask_pb2.FieldMask(paths=["paths_value"])
+        assert arg == mock_val
+
+
+def test_update_conversion_workspace_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_conversion_workspace(
+            clouddms.UpdateConversionWorkspaceRequest(),
+            conversion_workspace=conversionworkspace_resources.ConversionWorkspace(
+                name="name_value"
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_conversion_workspace_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.update_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.update_conversion_workspace(
+            conversion_workspace=conversionworkspace_resources.ConversionWorkspace(
+                name="name_value"
+            ),
+            update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].conversion_workspace + mock_val = conversionworkspace_resources.ConversionWorkspace(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_conversion_workspace_flattened_error_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_conversion_workspace( + clouddms.UpdateConversionWorkspaceRequest(), + conversion_workspace=conversionworkspace_resources.ConversionWorkspace( + name="name_value" + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DeleteConversionWorkspaceRequest, + dict, + ], +) +def test_delete_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + client.delete_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DeleteConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
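+        # FakeUnaryUnaryCall wraps the response in an awaitable grpc.aio-style
+        # call object, so the mocked stub can be awaited like a real RPC.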
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DeleteConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_async_from_dict(): + await test_delete_conversion_workspace_async(request_type=dict) + + +def test_delete_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DeleteConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_conversion_workspace_flattened(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_conversion_workspace( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+def test_delete_conversion_workspace_flattened_error():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_conversion_workspace(
+            clouddms.DeleteConversionWorkspaceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_conversion_workspace_flattened_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_conversion_workspace(
+            name="name_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].name
+        mock_val = "name_value"
+        assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_delete_conversion_workspace_flattened_error_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_conversion_workspace(
+            clouddms.DeleteConversionWorkspaceRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.parametrize(
+    "request_type",
+    [
+        clouddms.SeedConversionWorkspaceRequest,
+        dict,
+    ],
+)
+def test_seed_conversion_workspace(request_type, transport: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.seed_conversion_workspace), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+        response = client.seed_conversion_workspace(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == clouddms.SeedConversionWorkspaceRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_seed_conversion_workspace_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + client.seed_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.SeedConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SeedConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_async_from_dict(): + await test_seed_conversion_workspace_async(request_type=dict) + + +def test_seed_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SeedConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_seed_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SeedConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seed_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.seed_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
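+    # gRPC metadata is a sequence of (key, value) tuples, so the routing
+    # header can be asserted as an exact pair.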
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ImportMappingRulesRequest, + dict, + ], +) +def test_import_mapping_rules(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_import_mapping_rules_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + client.import_mapping_rules() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + +@pytest.mark.asyncio +async def test_import_mapping_rules_async( + transport: str = "grpc_asyncio", request_type=clouddms.ImportMappingRulesRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ImportMappingRulesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_import_mapping_rules_async_from_dict(): + await test_import_mapping_rules_async(request_type=dict) + + +def test_import_mapping_rules_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
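+    # For ImportMappingRules the routed field is `parent`, the resource name
+    # of the conversion workspace that owns the imported rules.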
+ request = clouddms.ImportMappingRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_import_mapping_rules_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ImportMappingRulesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.import_mapping_rules), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.import_mapping_rules(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ConvertConversionWorkspaceRequest, + dict, + ], +) +def test_convert_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_convert_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + client.convert_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.ConvertConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ConvertConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_async_from_dict(): + await test_convert_conversion_workspace_async(request_type=dict) + + +def test_convert_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_convert_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ConvertConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.convert_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.convert_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.CommitConversionWorkspaceRequest, + dict, + ], +) +def test_commit_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_commit_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + client.commit_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.CommitConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.CommitConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_async_from_dict(): + await test_commit_conversion_workspace_async(request_type=dict) + + +def test_commit_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = clouddms.CommitConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_commit_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.CommitConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.commit_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.commit_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.RollbackConversionWorkspaceRequest, + dict, + ], +) +def test_rollback_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_rollback_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + client.rollback_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.RollbackConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.RollbackConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_async_from_dict(): + await test_rollback_conversion_workspace_async(request_type=dict) + + +def test_rollback_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rollback_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.RollbackConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.rollback_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.rollback_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.ApplyConversionWorkspaceRequest, + dict, + ], +) +def test_apply_conversion_workspace(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_apply_conversion_workspace_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + client.apply_conversion_workspace() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async( + transport: str = "grpc_asyncio", + request_type=clouddms.ApplyConversionWorkspaceRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.ApplyConversionWorkspaceRequest() + + # Establish that the response is the type that we expect. 
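+ # (Long-running methods return a future wrapping the Operation; a real
+ # caller would poll or await it for the terminal result.)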
+ assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_async_from_dict(): + await test_apply_conversion_workspace_async(request_type=dict) + + +def test_apply_conversion_workspace_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_apply_conversion_workspace_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.ApplyConversionWorkspaceRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.apply_conversion_workspace), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.apply_conversion_workspace(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DescribeDatabaseEntitiesRequest, + dict, + ], +) +def test_describe_database_entities(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.DescribeDatabaseEntitiesResponse( + next_page_token="next_page_token_value", + ) + response = client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + # Establish that the response is the type that we expect. 
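+ # (List-style methods wrap the raw response in a pager that fetches any
+ # follow-up pages lazily as it is iterated.)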
+ assert isinstance(response, pagers.DescribeDatabaseEntitiesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_describe_database_entities_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + client.describe_database_entities() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + +@pytest.mark.asyncio +async def test_describe_database_entities_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DescribeDatabaseEntitiesRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.DescribeDatabaseEntitiesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeDatabaseEntitiesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.DescribeDatabaseEntitiesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_from_dict(): + await test_describe_database_entities_async(request_type=dict) + + +def test_describe_database_entities_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeDatabaseEntitiesRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), "__call__" + ) as call: + call.return_value = clouddms.DescribeDatabaseEntitiesResponse() + client.describe_database_entities(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_describe_database_entities_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
+ # Set these to a non-empty value.
+ request = clouddms.DescribeDatabaseEntitiesRequest()
+
+ request.conversion_workspace = "conversion_workspace_value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.describe_database_entities), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ clouddms.DescribeDatabaseEntitiesResponse()
+ )
+ await client.describe_database_entities(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "conversion_workspace=conversion_workspace_value",
+ ) in kw["metadata"]
+
+
+def test_describe_database_entities_pager(transport_name: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.describe_database_entities), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ clouddms.DescribeDatabaseEntitiesResponse(
+ database_entities=[
+ conversionworkspace_resources.DatabaseEntity(),
+ conversionworkspace_resources.DatabaseEntity(),
+ conversionworkspace_resources.DatabaseEntity(),
+ ],
+ next_page_token="abc",
+ ),
+ clouddms.DescribeDatabaseEntitiesResponse(
+ database_entities=[],
+ next_page_token="def",
+ ),
+ clouddms.DescribeDatabaseEntitiesResponse(
+ database_entities=[
+ conversionworkspace_resources.DatabaseEntity(),
+ ],
+ next_page_token="ghi",
+ ),
+ clouddms.DescribeDatabaseEntitiesResponse(
+ database_entities=[
+ conversionworkspace_resources.DatabaseEntity(),
+ conversionworkspace_resources.DatabaseEntity(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("conversion_workspace", ""),)),
+ )
+ pager = client.describe_database_entities(request={})
+
+ assert pager._metadata == metadata
+
+ results = list(pager)
+ assert len(results) == 6
+ assert all(
+ isinstance(i, conversionworkspace_resources.DatabaseEntity) for i in results
+ )
+
+
+def test_describe_database_entities_pages(transport_name: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.describe_database_entities), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
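+ # (Each invocation of the mocked stub consumes the next side_effect entry;
+ # the trailing RuntimeError guards against iterating past the final page.)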
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="abc", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token="def", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="ghi", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = list(client.describe_database_entities(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="abc", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token="def", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="ghi", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + async_pager = await client.describe_database_entities( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, conversionworkspace_resources.DatabaseEntity) + for i in responses + ) + + +@pytest.mark.asyncio +async def test_describe_database_entities_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_database_entities), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
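+ # (The stub was patched with mock.AsyncMock above, so each page is produced
+ # by a coroutine and consumed with "async for" below.)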
+ call.side_effect = ( + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="abc", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[], + next_page_token="def", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + ], + next_page_token="ghi", + ), + clouddms.DescribeDatabaseEntitiesResponse( + database_entities=[ + conversionworkspace_resources.DatabaseEntity(), + conversionworkspace_resources.DatabaseEntity(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.describe_database_entities(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.SearchBackgroundJobsRequest, + dict, + ], +) +def test_search_background_jobs(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.SearchBackgroundJobsResponse() + response = client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +def test_search_background_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + client.search_background_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + +@pytest.mark.asyncio +async def test_search_background_jobs_async( + transport: str = "grpc_asyncio", request_type=clouddms.SearchBackgroundJobsRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
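+ # (grpc_helpers_async.FakeUnaryUnaryCall wraps the response so the mocked
+ # stub can be awaited like a real gRPC call.)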
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.SearchBackgroundJobsResponse() + ) + response = await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.SearchBackgroundJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.SearchBackgroundJobsResponse) + + +@pytest.mark.asyncio +async def test_search_background_jobs_async_from_dict(): + await test_search_background_jobs_async(request_type=dict) + + +def test_search_background_jobs_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + call.return_value = clouddms.SearchBackgroundJobsResponse() + client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_background_jobs_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.SearchBackgroundJobsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_background_jobs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.SearchBackgroundJobsResponse() + ) + await client.search_background_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.DescribeConversionWorkspaceRevisionsRequest, + dict, + ], +) +def test_describe_conversion_workspace_revisions(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() + response = client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +def test_describe_conversion_workspace_revisions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + client.describe_conversion_workspace_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async( + transport: str = "grpc_asyncio", + request_type=clouddms.DescribeConversionWorkspaceRevisionsRequest, +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.DescribeConversionWorkspaceRevisionsResponse() + ) + response = await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.DescribeConversionWorkspaceRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, clouddms.DescribeConversionWorkspaceRevisionsResponse) + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_async_from_dict(): + await test_describe_conversion_workspace_revisions_async(request_type=dict) + + +def test_describe_conversion_workspace_revisions_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + call.return_value = clouddms.DescribeConversionWorkspaceRevisionsResponse() + client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
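+ # (mock_calls[0] unpacks to (name, args, kwargs); args[0] is the request
+ # object that was actually handed to the stub.)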
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_describe_conversion_workspace_revisions_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.DescribeConversionWorkspaceRevisionsRequest() + + request.conversion_workspace = "conversion_workspace_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.describe_conversion_workspace_revisions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.DescribeConversionWorkspaceRevisionsResponse() + ) + await client.describe_conversion_workspace_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "conversion_workspace=conversion_workspace_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + clouddms.FetchStaticIpsRequest, + dict, + ], +) +def test_fetch_static_ips(request_type, transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = clouddms.FetchStaticIpsResponse( + static_ips=["static_ips_value"], + next_page_token="next_page_token_value", + ) + response = client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchStaticIpsPager) + assert response.static_ips == ["static_ips_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_fetch_static_ips_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
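+ # (Patching __call__ on the type of the stub callable intercepts the RPC
+ # without ever touching a real channel.)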
+ with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + client.fetch_static_ips() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async( + transport: str = "grpc_asyncio", request_type=clouddms.FetchStaticIpsRequest +): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.FetchStaticIpsResponse( + static_ips=["static_ips_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == clouddms.FetchStaticIpsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FetchStaticIpsAsyncPager) + assert response.static_ips == ["static_ips_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_from_dict(): + await test_fetch_static_ips_async(request_type=dict) + + +def test_fetch_static_ips_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + call.return_value = clouddms.FetchStaticIpsResponse() + client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_fetch_static_ips_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = clouddms.FetchStaticIpsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + clouddms.FetchStaticIpsResponse() + ) + await client.fetch_static_ips(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0]
+ assert (
+ "x-goog-request-params",
+ "name=name_value",
+ ) in kw["metadata"]
+
+
+def test_fetch_static_ips_flattened():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = clouddms.FetchStaticIpsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.fetch_static_ips(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+def test_fetch_static_ips_flattened_error():
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.fetch_static_ips(
+ clouddms.FetchStaticIpsRequest(),
+ name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_fetch_static_ips_flattened_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ clouddms.FetchStaticIpsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.fetch_static_ips(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ arg = args[0].name
+ mock_val = "name_value"
+ assert arg == mock_val
+
+
+@pytest.mark.asyncio
+async def test_fetch_static_ips_flattened_error_async():
+ client = DataMigrationServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.fetch_static_ips(
+ clouddms.FetchStaticIpsRequest(),
+ name="name_value",
+ )
+
+
+def test_fetch_static_ips_pager(transport_name: str = "grpc"):
+ client = DataMigrationServiceClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ transport=transport_name,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), + ) + pager = client.fetch_static_ips(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_fetch_static_ips_pages(transport_name: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.fetch_static_ips), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.fetch_static_ips(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pager(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.fetch_static_ips( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_fetch_static_ips_async_pages(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.fetch_static_ips), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[], + next_page_token="def", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + ], + next_page_token="ghi", + ), + clouddms.FetchStaticIpsResponse( + static_ips=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.fetch_static_ips(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataMigrationServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataMigrationServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataMigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataMigrationServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
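+ # (google.auth.default is patched so the test never reads real Application
+ # Default Credentials from the environment.)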
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = DataMigrationServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataMigrationServiceGrpcTransport, + ) + + +def test_data_migration_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_migration_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataMigrationServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_migration_jobs", + "get_migration_job", + "create_migration_job", + "update_migration_job", + "delete_migration_job", + "start_migration_job", + "stop_migration_job", + "resume_migration_job", + "promote_migration_job", + "verify_migration_job", + "restart_migration_job", + "generate_ssh_script", + "list_connection_profiles", + "get_connection_profile", + "create_connection_profile", + "update_connection_profile", + "delete_connection_profile", + "create_private_connection", + "get_private_connection", + "list_private_connections", + "delete_private_connection", + "get_conversion_workspace", + "list_conversion_workspaces", + "create_conversion_workspace", + "update_conversion_workspace", + "delete_conversion_workspace", + "seed_conversion_workspace", + "import_mapping_rules", + "convert_conversion_workspace", + "commit_conversion_workspace", + "rollback_conversion_workspace", + "apply_conversion_workspace", + "describe_database_entities", + "search_background_jobs", + "describe_conversion_workspace_revisions", + "fetch_static_ips", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_migration_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, 
"load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_data_migration_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.clouddms_v1.services.data_migration_service.transports.DataMigrationServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataMigrationServiceTransport() + adc.assert_called_once() + + +def test_data_migration_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataMigrationServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataMigrationServiceGrpcTransport, grpc_helpers), + (transports.DataMigrationServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_data_migration_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "datamigration.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="datamigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataMigrationServiceGrpcTransport, + transports.DataMigrationServiceGrpcAsyncIOTransport, + ], +) +def test_data_migration_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_data_migration_service_host_no_port(transport_name): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datamigration.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ("datamigration.googleapis.com:443") + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_data_migration_service_host_with_port(transport_name): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="datamigration.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ("datamigration.googleapis.com:8000") + + +def test_data_migration_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
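+ # (A caller-supplied channel should be adopted as-is rather than the
+ # transport creating a new one.)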
+ transport = transports.DataMigrationServiceGrpcTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+def test_data_migration_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.DataMigrationServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk",
+ channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.DataMigrationServiceGrpcTransport,
+ transports.DataMigrationServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_data_migration_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
+):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = ga_credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(google.auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=None,
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataMigrationServiceGrpcTransport,
+        transports.DataMigrationServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_data_migration_service_transport_channel_mtls_with_adc(transport_class):
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_data_migration_service_grpc_lro_client():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_data_migration_service_grpc_lro_async_client():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc_asyncio",
+    )
+    transport = client.transport
+
+    # Ensure that we have an api-core operations client.
+    assert isinstance(
+        transport.operations_client,
+        operations_v1.OperationsAsyncClient,
+    )
+
+    # Ensure that subsequent calls to the property send the exact same object.
+    assert transport.operations_client is transport.operations_client
+
+
+def test_connection_profile_path():
+    project = "squid"
+    location = "clam"
+    connection_profile = "whelk"
+    expected = "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format(
+        project=project,
+        location=location,
+        connection_profile=connection_profile,
+    )
+    actual = DataMigrationServiceClient.connection_profile_path(
+        project, location, connection_profile
+    )
+    assert expected == actual
+
+
+def test_parse_connection_profile_path():
+    expected = {
+        "project": "octopus",
+        "location": "oyster",
+        "connection_profile": "nudibranch",
+    }
+    path = DataMigrationServiceClient.connection_profile_path(**expected)
+
+    # Check that the path construction is reversible.
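Every *_path/parse_* pair in this stretch of the suite asserts the same round-trip property: composing a resource name from its components and parsing it back is lossless. A sketch of what such a helper pair amounts to, using local stand-in names rather than the client's actual internals (the reversibility assertion continues below):

    import re

    def connection_profile_path(project: str, location: str, connection_profile: str) -> str:
        # Compose the canonical resource name from its components.
        return (
            f"projects/{project}/locations/{location}"
            f"/connectionProfiles/{connection_profile}"
        )

    def parse_connection_profile_path(path: str) -> dict:
        # Invert the template; an empty dict signals a non-matching path.
        m = re.match(
            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)"
            r"/connectionProfiles/(?P<connection_profile>.+?)$",
            path,
        )
        return m.groupdict() if m else {}

    expected = {"project": "octopus", "location": "oyster", "connection_profile": "nudibranch"}
    assert parse_connection_profile_path(connection_profile_path(**expected)) == expected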
+ actual = DataMigrationServiceClient.parse_connection_profile_path(path) + assert expected == actual + + +def test_conversion_workspace_path(): + project = "cuttlefish" + location = "mussel" + conversion_workspace = "winkle" + expected = "projects/{project}/locations/{location}/conversionWorkspaces/{conversion_workspace}".format( + project=project, + location=location, + conversion_workspace=conversion_workspace, + ) + actual = DataMigrationServiceClient.conversion_workspace_path( + project, location, conversion_workspace + ) + assert expected == actual + + +def test_parse_conversion_workspace_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "conversion_workspace": "abalone", + } + path = DataMigrationServiceClient.conversion_workspace_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_conversion_workspace_path(path) + assert expected == actual + + +def test_migration_job_path(): + project = "squid" + location = "clam" + migration_job = "whelk" + expected = ( + "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format( + project=project, + location=location, + migration_job=migration_job, + ) + ) + actual = DataMigrationServiceClient.migration_job_path( + project, location, migration_job + ) + assert expected == actual + + +def test_parse_migration_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "migration_job": "nudibranch", + } + path = DataMigrationServiceClient.migration_job_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_migration_job_path(path) + assert expected == actual + + +def test_networks_path(): + project = "cuttlefish" + network = "mussel" + expected = "projects/{project}/global/networks/{network}".format( + project=project, + network=network, + ) + actual = DataMigrationServiceClient.networks_path(project, network) + assert expected == actual + + +def test_parse_networks_path(): + expected = { + "project": "winkle", + "network": "nautilus", + } + path = DataMigrationServiceClient.networks_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_networks_path(path) + assert expected == actual + + +def test_private_connection_path(): + project = "scallop" + location = "abalone" + private_connection = "squid" + expected = "projects/{project}/locations/{location}/privateConnections/{private_connection}".format( + project=project, + location=location, + private_connection=private_connection, + ) + actual = DataMigrationServiceClient.private_connection_path( + project, location, private_connection + ) + assert expected == actual + + +def test_parse_private_connection_path(): + expected = { + "project": "clam", + "location": "whelk", + "private_connection": "octopus", + } + path = DataMigrationServiceClient.private_connection_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_private_connection_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataMigrationServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DataMigrationServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DataMigrationServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DataMigrationServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DataMigrationServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DataMigrationServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format( + project=project, + ) + actual = DataMigrationServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DataMigrationServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DataMigrationServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DataMigrationServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DataMigrationServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataMigrationServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataMigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataMigrationServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataMigrationServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_delete_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
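From here to the end of the suite, the operations, locations, and IAM mixin tests all follow one recipe: patch the transport's bound gRPC callable so nothing reaches the network, invoke the client method, then inspect the recorded mock call. Isolated from pytest, and assuming google-cloud-dms plus its test dependencies are installed, the recipe looks like this (the patched delete_operation call continues below):

    from unittest import mock

    from google.auth import credentials as ga_credentials
    from google.cloud.clouddms_v1.services.data_migration_service import (
        DataMigrationServiceClient,
    )
    from google.longrunning import operations_pb2

    client = DataMigrationServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
    )
    # Patching the callable's type intercepts the RPC before it is sent.
    with mock.patch.object(type(client.transport.get_operation), "__call__") as call:
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.get_operation(operations_pb2.GetOperationRequest())

    # Exactly one stub invocation; the response is the designated value.
    assert len(call.mock_calls) == 1
    assert response.name == "operations/spam"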
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
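The field-header tests above encode a routing rule: any request field that would appear in the HTTP/1.1 URI is mirrored into gRPC metadata under the x-goog-request-params key so the backend can route the call. A rough sketch of how such an entry is formed; the helper name is illustrative, not the generated client's internal routine (the cancel_operation type check resumes below):

    from urllib.parse import quote

    def routing_metadata(**fields):
        # e.g. name="locations" -> ("x-goog-request-params", "name=locations")
        params = "&".join(
            f"{key}={quote(str(value), safe='/')}"
            for key, value in sorted(fields.items())
        )
        return ("x-goog-request-params", params)

    assert routing_metadata(name="locations") == ("x-goog-request-params", "name=locations")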
+ assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
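A recurring detail in the async variants: the asyncio transport awaits its stub, so a bare protobuf message is not a usable mock return value. grpc_helpers_async.FakeUnaryUnaryCall wraps a payload in an awaitable that mimics a grpc.aio unary-unary call, which is why the designated return value just below (and in every async test here) takes that form. A self-contained illustration, assuming only google-api-core:

    import asyncio

    from google.api_core import grpc_helpers_async
    from google.longrunning import operations_pb2

    async def demo():
        # Awaiting the fake yields the wrapped payload, as a real call would.
        fake = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.ListOperationsResponse()
        )
        response = await fake
        assert isinstance(response, operations_pb2.ListOperationsResponse)

    asyncio.run(demo())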
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. 
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, locations_pb2.Location)
+
+
+def test_get_location_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = locations_pb2.Location()
+
+        client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+def test_set_iam_policy(transport: str = "grpc"):
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = policy_pb2.Policy(
+            version=774,
+            etag=b"etag_blob",
+        )
+        response = client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = DataMigrationServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(
+                version=774,
+                etag=b"etag_blob",
+            )
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+
+def test_set_iam_policy_field_headers():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        call.return_value = policy_pb2.Policy()
+
+        client.set_iam_policy(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
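The *_from_dict tests in this IAM block pass plain dicts that the client coerces into protobuf requests; the get_iam_policy variant just below also nests a GetPolicyOptions message. A sketch of the explicit construction that dict form is shorthand for, reusing the test's own placeholder values:

    from google.iam.v1 import iam_policy_pb2, options_pb2

    # Equivalent to request={"resource": ..., "options": {...}} in dict form.
    request = iam_policy_pb2.GetIamPolicyRequest(
        resource="resource_value",
        options=options_pb2.GetPolicyOptions(requested_policy_version=2598),
    )
    assert request.options.requested_policy_version == 2598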
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] -def test_data_migration_service_grpc_lro_client(): +def test_get_iam_policy_from_dict(): client = DataMigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. 
- assert isinstance( - transport.operations_client, - operations_v1.OperationsClient, ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() -def test_data_migration_service_grpc_lro_async_client(): +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): client = DataMigrationServiceAsyncClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - transport = client.transport - - # Ensure that we have a api-core operations client. - assert isinstance( - transport.operations_client, - operations_v1.OperationsAsyncClient, ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - # Ensure that subsequent calls to the property send the exact same object. - assert transport.operations_client is transport.operations_client + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() -def test_connection_profile_path(): - project = "squid" - location = "clam" - connection_profile = "whelk" - expected = "projects/{project}/locations/{location}/connectionProfiles/{connection_profile}".format( - project=project, - location=location, - connection_profile=connection_profile, - ) - actual = DataMigrationServiceClient.connection_profile_path( - project, location, connection_profile +def test_test_iam_permissions(transport: str = "grpc"): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - assert expected == actual - -def test_parse_connection_profile_path(): - expected = { - "project": "octopus", - "location": "oyster", - "connection_profile": "nudibranch", - } - path = DataMigrationServiceClient.connection_profile_path(**expected) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_connection_profile_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + response = client.test_iam_permissions(request) -def test_migration_job_path(): - project = "cuttlefish" - location = "mussel" - migration_job = "winkle" - expected = ( - "projects/{project}/locations/{location}/migrationJobs/{migration_job}".format( - project=project, - location=location, - migration_job=migration_job, - ) - ) - actual = DataMigrationServiceClient.migration_job_path( - project, location, migration_job - ) - assert expected == actual + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_migration_job_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "migration_job": "abalone", - } - path = DataMigrationServiceClient.migration_job_path(**expected) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_migration_job_path(path) - assert expected == actual + assert response.permissions == ["permissions_value"] -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - actual = DataMigrationServiceClient.common_billing_account_path(billing_account) - assert expected == actual + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = DataMigrationServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_billing_account_path(path) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + ) + response = await client.test_iam_permissions(request) -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = DataMigrationServiceClient.common_folder_path(folder) - assert expected == actual + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = DataMigrationServiceClient.common_folder_path(**expected) + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - # Check that the path construction is reversible. 
- actual = DataMigrationServiceClient.parse_common_folder_path(path) - assert expected == actual + assert response.permissions == ["permissions_value"] -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, +def test_test_iam_permissions_field_headers(): + client = DataMigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = DataMigrationServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = DataMigrationServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_organization_path(path) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = DataMigrationServiceClient.common_project_path(project) - assert expected == actual + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + client.test_iam_permissions(request) -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = DataMigrationServiceClient.common_project_path(**expected) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - # Check that the path construction is reversible. - actual = DataMigrationServiceClient.parse_common_project_path(path) - assert expected == actual + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = DataMigrationServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), ) - actual = DataMigrationServiceClient.common_location_path(project, location) - assert expected == actual + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = DataMigrationServiceClient.common_location_path(**expected) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) - # Check that the path construction is reversible. 
-    actual = DataMigrationServiceClient.parse_common_location_path(path)
-    assert expected == actual
+        await client.test_iam_permissions(request)

+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request


-def test_client_with_default_client_info():
-    client_info = gapic_v1.client_info.ClientInfo()
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "resource=resource/value",
+    ) in kw["metadata"]

-    with mock.patch.object(
-        transports.DataMigrationServiceTransport, "_prep_wrapped_messages"
-    ) as prep:
-        client = DataMigrationServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            client_info=client_info,
-        )
-        prep.assert_called_once_with(client_info)

+def test_test_iam_permissions_from_dict():
+    client = DataMigrationServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        transports.DataMigrationServiceTransport, "_prep_wrapped_messages"
-    ) as prep:
-        transport_class = DataMigrationServiceClient.get_transport_class()
-        transport = transport_class(
-            credentials=ga_credentials.AnonymousCredentials(),
-            client_info=client_info,
+        type(client.transport.test_iam_permissions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = iam_policy_pb2.TestIamPermissionsResponse()
+
+        response = client.test_iam_permissions(
+            request={
+                "resource": "resource_value",
+                "permissions": ["permissions_value"],
+            }
         )
-        prep.assert_called_once_with(client_info)
+        call.assert_called()


 @pytest.mark.asyncio
-async def test_transport_close_async():
+async def test_test_iam_permissions_from_dict_async():
     client = DataMigrationServiceAsyncClient(
         credentials=ga_credentials.AnonymousCredentials(),
-        transport="grpc_asyncio",
     )
+    # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(getattr(client.transport, "grpc_channel")), "close"
-    ) as close:
-        async with client:
-            close.assert_not_called()
-        close.assert_called_once()
+        type(client.transport.test_iam_permissions), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            iam_policy_pb2.TestIamPermissionsResponse()
+        )
+
+        response = await client.test_iam_permissions(
+            request={
+                "resource": "resource_value",
+                "permissions": ["permissions_value"],
+            }
+        )
+        call.assert_called()


 def test_transport_close():

From 4fc081a17d80e176e6cfd165f43f878ac904c121 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Thu, 25 May 2023 16:24:43 +0000
Subject: [PATCH 2/3] build(deps): bump requests to 2.31.0 [autoapprove] (#178)

Source-Link: https://togithub.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd
Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b
---
 .github/.OwlBot.lock.yaml | 3 ++-
 .kokoro/requirements.txt  | 6 +++---
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index b8edda5..32b3c48 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -13,4 +13,5 @@
 # limitations under the License.
 docker:
   image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6
+  digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b
+# created: 2023-05-25T14:56:16.294623272Z

diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt
index 66a2172..3b8d7ee 100644
--- a/.kokoro/requirements.txt
+++ b/.kokoro/requirements.txt
@@ -419,9 +419,9 @@ readme-renderer==37.3 \
     --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \
     --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343
     # via twine
-requests==2.28.1 \
-    --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \
-    --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349
+requests==2.31.0 \
+    --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \
+    --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1
     # via
     #   gcp-releasetool
     #   google-api-core

From a8f2916637b40659377001f01bfed4d9b178350c Mon Sep 17 00:00:00 2001
From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com>
Date: Wed, 31 May 2023 13:41:13 -0400
Subject: [PATCH 3/3] chore(main): release 1.7.0 (#177)

Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com>
---
 .release-please-manifest.json                      | 2 +-
 CHANGELOG.md                                       | 7 +++++++
 google/cloud/clouddms/gapic_version.py             | 2 +-
 google/cloud/clouddms_v1/gapic_version.py          | 2 +-
 .../snippet_metadata_google.cloud.clouddms.v1.json | 2 +-
 5 files changed, 11 insertions(+), 4 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 9464c4e..64e0684 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.6.2"
+  ".": "1.7.0"
 }

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 724f42b..102f815 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog

+## [1.7.0](https://github.com/googleapis/python-dms/compare/v1.6.2...v1.7.0) (2023-05-25)
+
+
+### Features
+
+* Add Oracle to PostgreSQL migration APIs ([#176](https://github.com/googleapis/python-dms/issues/176)) ([bf5348b](https://github.com/googleapis/python-dms/commit/bf5348be1fabd967f2a0df3719c4046378ebf4c4))
+
 ## [1.6.2](https://github.com/googleapis/python-dms/compare/v1.6.1...v1.6.2) (2023-03-23)

diff --git a/google/cloud/clouddms/gapic_version.py b/google/cloud/clouddms/gapic_version.py
index c6e54fe..f033c61 100644
--- a/google/cloud/clouddms/gapic_version.py
+++ b/google/cloud/clouddms/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "1.6.2"  # {x-release-please-version}
+__version__ = "1.7.0"  # {x-release-please-version}

diff --git a/google/cloud/clouddms_v1/gapic_version.py b/google/cloud/clouddms_v1/gapic_version.py
index c6e54fe..f033c61 100644
--- a/google/cloud/clouddms_v1/gapic_version.py
+++ b/google/cloud/clouddms_v1/gapic_version.py
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-__version__ = "1.6.2"  # {x-release-please-version}
+__version__ = "1.7.0"  # {x-release-please-version}

diff --git a/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json
index 10d2ba4..e8fba75 100644
--- a/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.cloud.clouddms.v1.json
@@ -8,7 +8,7 @@
     ],
     "language": "PYTHON",
     "name": "google-cloud-dms",
-    "version": "0.1.0"
+    "version": "1.7.0"
   },
   "snippets": [
     {
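
For orientation, the feature commit in this series introduces the conversion
workspace surface used for Oracle-to-PostgreSQL schema conversion. The snippet
below is a minimal sketch of how that surface might be driven once 1.7.0 is
installed; it is not code from the patches above, and it assumes placeholder
project/location/workspace IDs and engine version strings, plus the
long-running-operation behavior of create_conversion_workspace shown in the
generated samples added by the feature commit.

    # Hedged sketch (not part of this patch series): create an Oracle ->
    # PostgreSQL conversion workspace with google-cloud-dms 1.7.0. All resource
    # names, IDs, and version strings below are placeholders.
    from google.cloud import clouddms_v1

    client = clouddms_v1.DataMigrationServiceClient()

    # Describe the source (Oracle) and destination (PostgreSQL) engines.
    workspace = clouddms_v1.ConversionWorkspace()
    workspace.source.engine = clouddms_v1.DatabaseEngine.ORACLE
    workspace.source.version = "19"  # placeholder source version
    workspace.destination.engine = clouddms_v1.DatabaseEngine.POSTGRESQL
    workspace.destination.version = "15"  # placeholder destination version

    # create_conversion_workspace returns a long-running operation;
    # result() blocks until the workspace has been created.
    operation = client.create_conversion_workspace(
        request=clouddms_v1.CreateConversionWorkspaceRequest(
            parent="projects/my-project/locations/us-central1",  # placeholder
            conversion_workspace_id="oracle-to-pg",  # placeholder
            conversion_workspace=workspace,
        )
    )
    print(operation.result())

From there, the seed/convert/commit/apply methods added in the same commit
walk the workspace through schema conversion, following the flow suggested by
the generated sample files listed in the feature commit's diff stat.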