From 07b57303832461cac13da7403551ef5365c975e1 Mon Sep 17 00:00:00 2001 From: Corey Brune Date: Mon, 11 Sep 2017 17:17:11 -0500 Subject: [PATCH 1/6] initial import for branch new/api --- .../add_windows_env.py | 6 +- .../delphix_admin_setup.py | 8 +-- .../delphix_snapshot_group_will_plugin.py | 6 +- .../delphix_will_plugin.py | 4 +- .../dx_authorization.py | 0 dx_database.py => v1_8_0/dx_database.py | 8 +-- dx_delete_vdb.py => v1_8_0/dx_delete_vdb.py | 12 ++-- dx_environment.py => v1_8_0/dx_environment.py | 24 +++---- dx_groups.py => v1_8_0/dx_groups.py | 12 ++-- .../dx_jetstream_container.py | 13 ++-- dx_jobs.py => v1_8_0/dx_jobs.py | 8 +-- dx_operations.py => v1_8_0/dx_operations.py | 16 ++--- .../dx_operations_vdb.py | 0 .../dx_operations_vdb_orig.py | 20 +++--- .../dx_provision_dsource.py | 41 ++++++------ .../dx_provision_vdb.py | 66 +++++++++---------- dx_refresh_db.py => v1_8_0/dx_refresh_db.py | 0 dx_replication.py => v1_8_0/dx_replication.py | 16 ++--- dx_rewind_vdb.py => v1_8_0/dx_rewind_vdb.py | 16 ++--- dx_skel.py => v1_8_0/dx_skel.py | 8 +-- dx_snapshot_db.py => v1_8_0/dx_snapshot_db.py | 12 ++-- dx_update_env.py => v1_8_0/dx_update_env.py | 14 ++-- dx_users.py => v1_8_0/dx_users.py | 22 +++---- dxtools.conf => v1_8_0/dxtools.conf | 0 .../engine_network_assignment.py | 10 +-- engine_setup.py => v1_8_0/engine_setup.py | 10 +-- .../find_missing_archivelogs.py | 0 .../get_engine_pub_key.py | 8 +-- js_bookmark.py => v1_8_0/js_bookmark.py | 0 js_branch.py => v1_8_0/js_branch.py | 0 js_container.py => v1_8_0/js_container.py | 0 js_template.py => v1_8_0/js_template.py | 0 {lib => v1_8_0/lib}/DlpxException.py | 1 - {lib => v1_8_0/lib}/DxLogging.py | 0 {lib => v1_8_0/lib}/DxTimeflow.py | 0 {lib => v1_8_0/lib}/GetReferences.py | 0 {lib => v1_8_0/lib}/GetSession.py | 0 {lib => v1_8_0/lib}/__init__.py | 0 .../list_all_databases.py | 6 +- .../simple_snapshot.py | 6 +- snapshot_group.py => v1_8_0/snapshot_group.py | 8 +-- .../test_dx_authorization.py | 0 .../test_dx_operation.py | 0 .../test_js_bookmarks.py | 0 .../test_js_branches.py | 0 .../test_js_containers.py | 0 .../test_js_templates.py | 0 .../trigger_replication.py | 8 +-- 48 files changed, 193 insertions(+), 196 deletions(-) rename add_windows_env.py => v1_8_0/add_windows_env.py (86%) rename delphix_admin_setup.py => v1_8_0/delphix_admin_setup.py (96%) rename delphix_snapshot_group_will_plugin.py => v1_8_0/delphix_snapshot_group_will_plugin.py (86%) rename delphix_will_plugin.py => v1_8_0/delphix_will_plugin.py (98%) rename dx_authorization.py => v1_8_0/dx_authorization.py (100%) rename dx_database.py => v1_8_0/dx_database.py (98%) rename dx_delete_vdb.py => v1_8_0/dx_delete_vdb.py (98%) rename dx_environment.py => v1_8_0/dx_environment.py (97%) rename dx_groups.py => v1_8_0/dx_groups.py (97%) rename dx_jetstream_container.py => v1_8_0/dx_jetstream_container.py (98%) rename dx_jobs.py => v1_8_0/dx_jobs.py (98%) rename dx_operations.py => v1_8_0/dx_operations.py (98%) rename dx_operations_vdb.py => v1_8_0/dx_operations_vdb.py (100%) rename dx_operations_vdb_orig.py => v1_8_0/dx_operations_vdb_orig.py (97%) rename dx_provision_dsource.py => v1_8_0/dx_provision_dsource.py (96%) rename dx_provision_vdb.py => v1_8_0/dx_provision_vdb.py (96%) rename dx_refresh_db.py => v1_8_0/dx_refresh_db.py (100%) rename dx_replication.py => v1_8_0/dx_replication.py (97%) rename dx_rewind_vdb.py => v1_8_0/dx_rewind_vdb.py (97%) rename dx_skel.py => v1_8_0/dx_skel.py (98%) rename dx_snapshot_db.py => v1_8_0/dx_snapshot_db.py (98%) rename dx_update_env.py 
=> v1_8_0/dx_update_env.py (96%) rename dx_users.py => v1_8_0/dx_users.py (96%) rename dxtools.conf => v1_8_0/dxtools.conf (100%) rename engine_network_assignment.py => v1_8_0/engine_network_assignment.py (96%) rename engine_setup.py => v1_8_0/engine_setup.py (96%) rename find_missing_archivelogs.py => v1_8_0/find_missing_archivelogs.py (100%) rename get_engine_pub_key.py => v1_8_0/get_engine_pub_key.py (96%) rename js_bookmark.py => v1_8_0/js_bookmark.py (100%) rename js_branch.py => v1_8_0/js_branch.py (100%) rename js_container.py => v1_8_0/js_container.py (100%) rename js_template.py => v1_8_0/js_template.py (100%) rename {lib => v1_8_0/lib}/DlpxException.py (89%) rename {lib => v1_8_0/lib}/DxLogging.py (100%) rename {lib => v1_8_0/lib}/DxTimeflow.py (100%) rename {lib => v1_8_0/lib}/GetReferences.py (100%) rename {lib => v1_8_0/lib}/GetSession.py (100%) rename {lib => v1_8_0/lib}/__init__.py (100%) rename list_all_databases.py => v1_8_0/list_all_databases.py (70%) rename simple_snapshot.py => v1_8_0/simple_snapshot.py (75%) rename snapshot_group.py => v1_8_0/snapshot_group.py (70%) rename test_dx_authorization.py => v1_8_0/test_dx_authorization.py (100%) rename test_dx_operation.py => v1_8_0/test_dx_operation.py (100%) rename test_js_bookmarks.py => v1_8_0/test_js_bookmarks.py (100%) rename test_js_branches.py => v1_8_0/test_js_branches.py (100%) rename test_js_containers.py => v1_8_0/test_js_containers.py (100%) rename test_js_templates.py => v1_8_0/test_js_templates.py (100%) rename trigger_replication.py => v1_8_0/trigger_replication.py (89%) diff --git a/add_windows_env.py b/v1_8_0/add_windows_env.py similarity index 86% rename from add_windows_env.py rename to v1_8_0/add_windows_env.py index 0081c32..ffe50fc 100755 --- a/add_windows_env.py +++ b/v1_8_0/add_windows_env.py @@ -3,9 +3,9 @@ VERSION="v.0.0.001" #just a quick and dirty example of adding a windows source -from delphixpy.delphix_engine import DelphixEngine -from delphixpy.web import environment -from delphixpy.web.vo import HostEnvironmentCreateParameters, EnvironmentUser, PasswordCredential, \ +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.web import environment +from delphixpy.v1_8_0.web.vo import HostEnvironmentCreateParameters, EnvironmentUser, PasswordCredential, \ WindowsHostEnvironment, WindowsHostCreateParameters, WindowsHost engine_address = "192.168.2.37" diff --git a/delphix_admin_setup.py b/v1_8_0/delphix_admin_setup.py similarity index 96% rename from delphix_admin_setup.py rename to v1_8_0/delphix_admin_setup.py index 2897434..dd7049b 100755 --- a/delphix_admin_setup.py +++ b/v1_8_0/delphix_admin_setup.py @@ -16,10 +16,10 @@ import traceback import untangle -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0.web import user -from delphixpy.v1_6_0.web.vo import CredentialUpdateParameters, PasswordCredential, User +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError, JobError +from delphixpy.v1_8_0.web import user +from delphixpy.v1_8_0.web.vo import CredentialUpdateParameters, PasswordCredential, User def serversess(f_engine_address, f_engine_username, f_engine_password): diff --git a/delphix_snapshot_group_will_plugin.py b/v1_8_0/delphix_snapshot_group_will_plugin.py similarity index 86% rename from delphix_snapshot_group_will_plugin.py rename to v1_8_0/delphix_snapshot_group_will_plugin.py index 89833f2..279acd3 100755 --- 
a/delphix_snapshot_group_will_plugin.py +++ b/v1_8_0/delphix_snapshot_group_will_plugin.py @@ -4,9 +4,9 @@ from will.plugin import WillPlugin from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import group, database -from delphixpy.v1_6_0 import job_context +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.web import group, database +from delphixpy.v1_8_0 import job_context class DelphixSnapshotPlugin(WillPlugin): diff --git a/delphix_will_plugin.py b/v1_8_0/delphix_will_plugin.py similarity index 98% rename from delphix_will_plugin.py rename to v1_8_0/delphix_will_plugin.py index ba58e07..37f7682 100755 --- a/delphix_will_plugin.py +++ b/v1_8_0/delphix_will_plugin.py @@ -4,8 +4,8 @@ from will.plugin import WillPlugin from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import database +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.web import database import imp, subprocess, shlex VERSION=0.001 diff --git a/dx_authorization.py b/v1_8_0/dx_authorization.py similarity index 100% rename from dx_authorization.py rename to v1_8_0/dx_authorization.py diff --git a/dx_database.py b/v1_8_0/dx_database.py similarity index 98% rename from dx_database.py rename to v1_8_0/dx_database.py index 5216052..47b30db 100755 --- a/dx_database.py +++ b/v1_8_0/dx_database.py @@ -43,10 +43,10 @@ from time import sleep, time from docopt import docopt -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import job +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import job from lib.DlpxException import DlpxException from lib.DxLogging import logging_est diff --git a/dx_delete_vdb.py b/v1_8_0/dx_delete_vdb.py similarity index 98% rename from dx_delete_vdb.py rename to v1_8_0/dx_delete_vdb.py index ca2d242..95a6303 100755 --- a/dx_delete_vdb.py +++ b/v1_8_0/dx_delete_vdb.py @@ -69,11 +69,11 @@ from multiprocessing import Process from time import sleep, time -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0 import job_context -from delphixpy.v1_6_0.web import database, environment, group, job, source, user -from delphixpy.v1_6_0.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError, JobError +from delphixpy.v1_8_0 import job_context +from delphixpy.v1_8_0.web import database, environment, group, job, source, user +from delphixpy.v1_8_0.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters def find_obj_by_name(engine, server, f_class, obj_name): @@ -570,4 +570,4 @@ def main(argv): sys.exit() #Feed our arguments to the main function, and off we go! 
print arguments - main(arguments) \ No newline at end of file + main(arguments) diff --git a/dx_environment.py b/v1_8_0/dx_environment.py similarity index 97% rename from dx_environment.py rename to v1_8_0/dx_environment.py index 8cb987d..2a77d42 100755 --- a/dx_environment.py +++ b/v1_8_0/dx_environment.py @@ -78,18 +78,18 @@ import traceback from time import sleep, time -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import environment -from delphixpy.web import job -from delphixpy.web import host -from delphixpy.web.vo import UnixHostEnvironment -from delphixpy.web.vo import ASEHostEnvironmentParameters -from delphixpy.web.vo import HostEnvironmentCreateParameters -from delphixpy.web.vo import WindowsHostEnvironment -from delphixpy.web.vo import WindowsHost -from delphixpy.web.vo import UnixHost +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import environment +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import host +from delphixpy.v1_8_0.web.vo import UnixHostEnvironment +from delphixpy.v1_8_0.web.vo import ASEHostEnvironmentParameters +from delphixpy.v1_8_0.web.vo import HostEnvironmentCreateParameters +from delphixpy.v1_8_0.web.vo import WindowsHostEnvironment +from delphixpy.v1_8_0.web.vo import WindowsHost +from delphixpy.v1_8_0.web.vo import UnixHost from lib.DlpxException import DlpxException from lib.GetSession import GetSession diff --git a/dx_groups.py b/v1_8_0/dx_groups.py similarity index 97% rename from dx_groups.py rename to v1_8_0/dx_groups.py index dbb75c7..ee26847 100755 --- a/dx_groups.py +++ b/v1_8_0/dx_groups.py @@ -51,12 +51,12 @@ from time import sleep, time from docopt import docopt -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import job -from delphixpy.web import group -from delphixpy.web.vo import Group +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import group +from delphixpy.v1_8_0.web.vo import Group from lib.DlpxException import DlpxException from lib.DxLogging import logging_est diff --git a/dx_jetstream_container.py b/v1_8_0/dx_jetstream_container.py similarity index 98% rename from dx_jetstream_container.py rename to v1_8_0/dx_jetstream_container.py index 0ba25db..db7f590 100755 --- a/dx_jetstream_container.py +++ b/v1_8_0/dx_jetstream_container.py @@ -70,12 +70,11 @@ from multiprocessing import Process from time import sleep, time -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0 import job_context -from delphixpy.v1_6_0.web import jetstream, job -from delphixpy.v1_6_0.web.vo import JSBookmark, JSBookmarkCreateParameters, JSTimelinePointLatestTimeInput -#from delphixpy.v1_6_0.web.vo import +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError, JobError +from delphixpy.v1_8_0 import job_context +from delphixpy.v1_8_0.web import jetstream, job +from delphixpy.v1_8_0.web.vo import JSBookmark, JSBookmarkCreateParameters, JSTimelinePointLatestTimeInput def run_async(func): """ @@ 
-586,4 +585,4 @@ def main(argv): #Feed our arguments to the main function, and off we go! print arguments - main(arguments) \ No newline at end of file + main(arguments) diff --git a/dx_jobs.py b/v1_8_0/dx_jobs.py similarity index 98% rename from dx_jobs.py rename to v1_8_0/dx_jobs.py index b04eedb..069853b 100755 --- a/dx_jobs.py +++ b/v1_8_0/dx_jobs.py @@ -50,10 +50,10 @@ from time import sleep, time from docopt import docopt -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import job +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import job from lib.DlpxException import DlpxException from lib.DxLogging import logging_est diff --git a/dx_operations.py b/v1_8_0/dx_operations.py similarity index 98% rename from dx_operations.py rename to v1_8_0/dx_operations.py index 18c37c3..ff72cb8 100755 --- a/dx_operations.py +++ b/v1_8_0/dx_operations.py @@ -52,13 +52,13 @@ from time import sleep, time import traceback -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import database -from delphixpy.web import job -from delphixpy.web import source -from delphixpy.web.capacity import consumer +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import database +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import source +from delphixpy.v1_8_0.web.capacity import consumer from docopt import docopt from lib.DlpxException import DlpxException @@ -412,4 +412,4 @@ def main(): arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) # Feed our arguments to the main function, and off we go! 
- main() \ No newline at end of file + main() diff --git a/dx_operations_vdb.py b/v1_8_0/dx_operations_vdb.py similarity index 100% rename from dx_operations_vdb.py rename to v1_8_0/dx_operations_vdb.py diff --git a/dx_operations_vdb_orig.py b/v1_8_0/dx_operations_vdb_orig.py similarity index 97% rename from dx_operations_vdb_orig.py rename to v1_8_0/dx_operations_vdb_orig.py index 4d49774..60d4dcf 100755 --- a/dx_operations_vdb_orig.py +++ b/v1_8_0/dx_operations_vdb_orig.py @@ -53,16 +53,16 @@ from docopt import docopt import re -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import database -from delphixpy.web import job -from delphixpy.web import source -from delphixpy.web import sourceconfig -from delphixpy.web import repository -from delphixpy.web import environment -from delphixpy.web.capacity import consumer +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import database +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import source +from delphixpy.v1_8_0.web import sourceconfig +from delphixpy.v1_8_0.web import repository +from delphixpy.v1_8_0.web import environment +from delphixpy.v1_8_0.web.capacity import consumer from lib.DlpxException import DlpxException from lib.DxLogging import logging_est diff --git a/dx_provision_dsource.py b/v1_8_0/dx_provision_dsource.py similarity index 96% rename from dx_provision_dsource.py rename to v1_8_0/dx_provision_dsource.py index 0103fdb..dac49fb 100755 --- a/dx_provision_dsource.py +++ b/v1_8_0/dx_provision_dsource.py @@ -99,27 +99,26 @@ from time import sleep, time from docopt import docopt, DocoptExit -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import sourceconfig -from delphixpy.web import group -from delphixpy.web import job -from delphixpy.web import environment -from delphixpy.web import repository -#from delphixpy.web.database import link -from delphixpy.web import database -from delphixpy.web.vo import OracleSIConfig -from delphixpy.web.vo import OracleInstance -from delphixpy.web.vo import LinkParameters -from delphixpy.web.vo import OracleLinkData -from delphixpy.web.vo import OracleSourcingPolicy -from delphixpy.web.vo import ASELinkData -from delphixpy.web.vo import ASELatestBackupSyncParameters -from delphixpy.web.vo import ASENewBackupSyncParameters -from delphixpy.web.vo import ASESpecificBackupSyncParameters -from delphixpy.web.vo import MSSqlLinkData -from delphixpy.web.vo import SourcingPolicy +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import sourceconfig +from delphixpy.v1_8_0.web import group +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import environment +from delphixpy.v1_8_0.web import repository +from delphixpy.v1_8_0.web import database +from delphixpy.v1_8_0.web.vo import OracleSIConfig +from delphixpy.v1_8_0.web.vo import OracleInstance +from delphixpy.v1_8_0.web.vo import LinkParameters +from delphixpy.v1_8_0.web.vo import OracleLinkData +from delphixpy.v1_8_0.web.vo import OracleSourcingPolicy +from delphixpy.v1_8_0.web.vo import ASELinkData +from delphixpy.v1_8_0.web.vo import 
ASELatestBackupSyncParameters +from delphixpy.v1_8_0.web.vo import ASENewBackupSyncParameters +from delphixpy.v1_8_0.web.vo import ASESpecificBackupSyncParameters +from delphixpy.v1_8_0.web.vo import MSSqlLinkData +from delphixpy.v1_8_0.web.vo import SourcingPolicy from lib.DlpxException import DlpxException from lib.GetReferences import find_obj_by_name diff --git a/dx_provision_vdb.py b/v1_8_0/dx_provision_vdb.py similarity index 96% rename from dx_provision_vdb.py rename to v1_8_0/dx_provision_vdb.py index 8a4580c..dcdfe71 100755 --- a/dx_provision_vdb.py +++ b/v1_8_0/dx_provision_vdb.py @@ -109,39 +109,39 @@ from os.path import basename from time import sleep, time -from delphixpy.delphix_engine import DelphixEngine -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import database -from delphixpy.web import environment -from delphixpy.web import group -from delphixpy.web import job -from delphixpy.web import repository -from delphixpy.web import snapshot -from delphixpy.web import source -from delphixpy.web.database import template -from delphixpy.web.vo import VirtualSourceOperations -from delphixpy.web.vo import OracleDatabaseContainer -from delphixpy.web.vo import OracleInstance -from delphixpy.web.vo import OracleProvisionParameters -from delphixpy.web.vo import OracleSIConfig -from delphixpy.web.vo import OracleVirtualSource -from delphixpy.web.vo import TimeflowPointLocation -from delphixpy.web.vo import TimeflowPointSemantic -from delphixpy.web.vo import TimeflowPointTimestamp -from delphixpy.web.vo import ASEDBContainer -from delphixpy.web.vo import ASEInstanceConfig -from delphixpy.web.vo import ASEProvisionParameters -from delphixpy.web.vo import ASESIConfig -from delphixpy.web.vo import ASEVirtualSource -from delphixpy.web.vo import MSSqlProvisionParameters -from delphixpy.web.vo import MSSqlDatabaseContainer -from delphixpy.web.vo import MSSqlVirtualSource -from delphixpy.web.vo import MSSqlSIConfig -from delphixpy.web.vo import AppDataVirtualSource -from delphixpy.web.vo import AppDataProvisionParameters -from delphixpy.web.vo import AppDataDirectSourceConfig +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import database +from delphixpy.v1_8_0.web import environment +from delphixpy.v1_8_0.web import group +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import repository +from delphixpy.v1_8_0.web import snapshot +from delphixpy.v1_8_0.web import source +from delphixpy.v1_8_0.web.database import template +from delphixpy.v1_8_0.web.vo import VirtualSourceOperations +from delphixpy.v1_8_0.web.vo import OracleDatabaseContainer +from delphixpy.v1_8_0.web.vo import OracleInstance +from delphixpy.v1_8_0.web.vo import OracleProvisionParameters +from delphixpy.v1_8_0.web.vo import OracleSIConfig +from delphixpy.v1_8_0.web.vo import OracleVirtualSource +from delphixpy.v1_8_0.web.vo import TimeflowPointLocation +from delphixpy.v1_8_0.web.vo import TimeflowPointSemantic +from delphixpy.v1_8_0.web.vo import TimeflowPointTimestamp +from delphixpy.v1_8_0.web.vo import ASEDBContainer +from delphixpy.v1_8_0.web.vo import ASEInstanceConfig +from delphixpy.v1_8_0.web.vo import ASEProvisionParameters +from delphixpy.v1_8_0.web.vo import ASESIConfig +from delphixpy.v1_8_0.web.vo 
import ASEVirtualSource +from delphixpy.v1_8_0.web.vo import MSSqlProvisionParameters +from delphixpy.v1_8_0.web.vo import MSSqlDatabaseContainer +from delphixpy.v1_8_0.web.vo import MSSqlVirtualSource +from delphixpy.v1_8_0.web.vo import MSSqlSIConfig +from delphixpy.v1_8_0.web.vo import AppDataVirtualSource +from delphixpy.v1_8_0.web.vo import AppDataProvisionParameters +from delphixpy.v1_8_0.web.vo import AppDataDirectSourceConfig from lib.DxTimeflow import DxTimeflow from lib.DlpxException import DlpxException diff --git a/dx_refresh_db.py b/v1_8_0/dx_refresh_db.py similarity index 100% rename from dx_refresh_db.py rename to v1_8_0/dx_refresh_db.py diff --git a/dx_replication.py b/v1_8_0/dx_replication.py similarity index 97% rename from dx_replication.py rename to v1_8_0/dx_replication.py index f6c7def..2e5c2ba 100755 --- a/dx_replication.py +++ b/v1_8_0/dx_replication.py @@ -62,14 +62,14 @@ from time import sleep, time from docopt import docopt -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import job -from delphixpy.web import database -from delphixpy.web.replication import spec -from delphixpy.web.vo import ReplicationSpec -from delphixpy.web.vo import ReplicationList +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import database +from delphixpy.v1_8_0.web.replication import spec +from delphixpy.v1_8_0.web.vo import ReplicationSpec +from delphixpy.v1_8_0.web.vo import ReplicationList from lib.DlpxException import DlpxException from lib.DxLogging import logging_est diff --git a/dx_rewind_vdb.py b/v1_8_0/dx_rewind_vdb.py similarity index 97% rename from dx_rewind_vdb.py rename to v1_8_0/dx_rewind_vdb.py index f7c7327..fbdc54c 100755 --- a/dx_rewind_vdb.py +++ b/v1_8_0/dx_rewind_vdb.py @@ -62,13 +62,13 @@ from time import time, sleep import traceback -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import database -from delphixpy.web import job -from delphixpy.web.vo import RollbackParameters -from delphixpy.web.vo import OracleRollbackParameters +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import database +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web.vo import RollbackParameters +from delphixpy.v1_8_0.web.vo import OracleRollbackParameters from lib.DlpxException import DlpxException from lib.DxTimeflow import DxTimeflow @@ -383,4 +383,4 @@ def main(): arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) # Feed our arguments to the main function, and off we go! 
- main() \ No newline at end of file + main() diff --git a/dx_skel.py b/v1_8_0/dx_skel.py similarity index 98% rename from dx_skel.py rename to v1_8_0/dx_skel.py index 1ea6bce..d58712a 100755 --- a/dx_skel.py +++ b/v1_8_0/dx_skel.py @@ -43,10 +43,10 @@ from time import sleep, time from docopt import docopt -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import job +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import job from lib.DlpxException import DlpxException from lib.DxLogging import logging_est diff --git a/dx_snapshot_db.py b/v1_8_0/dx_snapshot_db.py similarity index 98% rename from dx_snapshot_db.py rename to v1_8_0/dx_snapshot_db.py index 19f2342..a1a610f 100755 --- a/dx_snapshot_db.py +++ b/v1_8_0/dx_snapshot_db.py @@ -75,11 +75,11 @@ from multiprocessing import Process from time import sleep, time -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0 import job_context -from delphixpy.v1_6_0.web import database, environment, group, job, source, user -from delphixpy.v1_6_0.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError, JobError +from delphixpy.v1_8_0 import job_context +from delphixpy.v1_8_0.web import database, environment, group, job, source, user +from delphixpy.v1_8_0.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters def ase_latest_backup_sync_parameters(): obj = ASELatestBackupSyncParameters() @@ -616,4 +616,4 @@ def main(argv): print(__doc__) sys.exit() #Feed our arguments to the main function, and off we go! 
- main(arguments) \ No newline at end of file + main(arguments) diff --git a/dx_update_env.py b/v1_8_0/dx_update_env.py similarity index 96% rename from dx_update_env.py rename to v1_8_0/dx_update_env.py index a343079..99e5600 100755 --- a/dx_update_env.py +++ b/v1_8_0/dx_update_env.py @@ -44,13 +44,13 @@ from time import sleep, time from docopt import docopt -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import job -from delphixpy.web import environment -from delphixpy.web.vo import ASEHostEnvironmentParameters -from delphixpy.web.vo import UnixHostEnvironment +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import environment +from delphixpy.v1_8_0.web.vo import ASEHostEnvironmentParameters +from delphixpy.v1_8_0.web.vo import UnixHostEnvironment from lib.DlpxException import DlpxException diff --git a/dx_users.py b/v1_8_0/dx_users.py similarity index 96% rename from dx_users.py rename to v1_8_0/dx_users.py index e7e08cd..dffd138 100755 --- a/dx_users.py +++ b/v1_8_0/dx_users.py @@ -61,17 +61,17 @@ from time import sleep, time from docopt import docopt -from delphixpy.exceptions import HttpError -from delphixpy.exceptions import JobError -from delphixpy.exceptions import RequestError -from delphixpy.web import authorization -from delphixpy.web import job -from delphixpy.web import user -from delphixpy.web import role -from delphixpy.web.vo import Authorization -from delphixpy.web.vo import User -from delphixpy.web.vo import PasswordCredential -from delphixpy.web.vo import CredentialUpdateParameters +from delphixpy.v1_8_0.exceptions import HttpError +from delphixpy.v1_8_0.exceptions import JobError +from delphixpy.v1_8_0.exceptions import RequestError +from delphixpy.v1_8_0.web import authorization +from delphixpy.v1_8_0.web import job +from delphixpy.v1_8_0.web import user +from delphixpy.v1_8_0.web import role +from delphixpy.v1_8_0.web.vo import Authorization +from delphixpy.v1_8_0.web.vo import User +from delphixpy.v1_8_0.web.vo import PasswordCredential +from delphixpy.v1_8_0.web.vo import CredentialUpdateParameters from lib.DlpxException import DlpxException from lib.DxLogging import logging_est diff --git a/dxtools.conf b/v1_8_0/dxtools.conf similarity index 100% rename from dxtools.conf rename to v1_8_0/dxtools.conf diff --git a/engine_network_assignment.py b/v1_8_0/engine_network_assignment.py similarity index 96% rename from engine_network_assignment.py rename to v1_8_0/engine_network_assignment.py index 30fa09b..f948e2a 100755 --- a/engine_network_assignment.py +++ b/v1_8_0/engine_network_assignment.py @@ -16,10 +16,10 @@ import errno from socket import error as socket_error -from delphixpy.delphix_engine import DelphixEngine -from delphixpy.exceptions import HttpError,JobError -from delphixpy.web import network, system, user, service -from delphixpy.web.vo import PasswordCredential, User, NetworkInterface, \ +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError,JobError +from delphixpy.v1_8_0.web import network, system, user, service +from delphixpy.v1_8_0.web.vo import PasswordCredential, User, NetworkInterface, \ InterfaceAddress, DNSConfig, SystemInfo, NetworkRoute def system_serversess(f_engine_address, f_engine_username, f_engine_password): @@ 
-246,4 +246,4 @@ def main(argv): sys.exit(2) if __name__ == "__main__": - main(sys.argv[1:]) \ No newline at end of file + main(sys.argv[1:]) diff --git a/engine_setup.py b/v1_8_0/engine_setup.py similarity index 96% rename from engine_setup.py rename to v1_8_0/engine_setup.py index 718d1f5..c68269d 100755 --- a/engine_setup.py +++ b/v1_8_0/engine_setup.py @@ -16,10 +16,10 @@ import traceback import untangle -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError,JobError -from delphixpy.v1_6_0.web import domain, storage, user -from delphixpy.v1_6_0.web.vo import CredentialUpdateParameters, PasswordCredential, DomainCreateParameters, User +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError,JobError +from delphixpy.v1_8_0.web import domain, storage, user +from delphixpy.v1_8_0.web.vo import CredentialUpdateParameters, PasswordCredential, DomainCreateParameters, User from lib.GetSession import GetSession def system_serversess(f_engine_address, f_engine_username, f_engine_password): @@ -200,4 +200,4 @@ def main(argv): sys.exit(2) if __name__ == "__main__": - main(sys.argv[1:]) \ No newline at end of file + main(sys.argv[1:]) diff --git a/find_missing_archivelogs.py b/v1_8_0/find_missing_archivelogs.py similarity index 100% rename from find_missing_archivelogs.py rename to v1_8_0/find_missing_archivelogs.py diff --git a/get_engine_pub_key.py b/v1_8_0/get_engine_pub_key.py similarity index 96% rename from get_engine_pub_key.py rename to v1_8_0/get_engine_pub_key.py index bda79bb..630fe81 100755 --- a/get_engine_pub_key.py +++ b/v1_8_0/get_engine_pub_key.py @@ -14,9 +14,9 @@ import traceback import untangle -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError,JobError -from delphixpy.v1_6_0.web import system +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.exceptions import HttpError,JobError +from delphixpy.v1_8_0.web import system from lib.GetSession import GetSession @@ -166,4 +166,4 @@ def main(argv): sys.exit(2) if __name__ == "__main__": - main(sys.argv[1:]) \ No newline at end of file + main(sys.argv[1:]) diff --git a/js_bookmark.py b/v1_8_0/js_bookmark.py similarity index 100% rename from js_bookmark.py rename to v1_8_0/js_bookmark.py diff --git a/js_branch.py b/v1_8_0/js_branch.py similarity index 100% rename from js_branch.py rename to v1_8_0/js_branch.py diff --git a/js_container.py b/v1_8_0/js_container.py similarity index 100% rename from js_container.py rename to v1_8_0/js_container.py diff --git a/js_template.py b/v1_8_0/js_template.py similarity index 100% rename from js_template.py rename to v1_8_0/js_template.py diff --git a/lib/DlpxException.py b/v1_8_0/lib/DlpxException.py similarity index 89% rename from lib/DlpxException.py rename to v1_8_0/lib/DlpxException.py index 874e732..e6db45c 100644 --- a/lib/DlpxException.py +++ b/v1_8_0/lib/DlpxException.py @@ -11,5 +11,4 @@ class DlpxException(Exception): def __init__(self, message): -# print_exception(message) Exception.__init__(self, message) diff --git a/lib/DxLogging.py b/v1_8_0/lib/DxLogging.py similarity index 100% rename from lib/DxLogging.py rename to v1_8_0/lib/DxLogging.py diff --git a/lib/DxTimeflow.py b/v1_8_0/lib/DxTimeflow.py similarity index 100% rename from lib/DxTimeflow.py rename to v1_8_0/lib/DxTimeflow.py diff --git a/lib/GetReferences.py b/v1_8_0/lib/GetReferences.py similarity index 100% rename from 
lib/GetReferences.py rename to v1_8_0/lib/GetReferences.py diff --git a/lib/GetSession.py b/v1_8_0/lib/GetSession.py similarity index 100% rename from lib/GetSession.py rename to v1_8_0/lib/GetSession.py diff --git a/lib/__init__.py b/v1_8_0/lib/__init__.py similarity index 100% rename from lib/__init__.py rename to v1_8_0/lib/__init__.py diff --git a/list_all_databases.py b/v1_8_0/list_all_databases.py similarity index 70% rename from list_all_databases.py rename to v1_8_0/list_all_databases.py index f3c52e5..dbc5f2e 100755 --- a/list_all_databases.py +++ b/v1_8_0/list_all_databases.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import database +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.web import database server_session= DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") @@ -12,4 +12,4 @@ print str(len(all_databases)) + " databases in the LandsharkEngine" for each in all_databases: - print each.name \ No newline at end of file + print each.name diff --git a/simple_snapshot.py b/v1_8_0/simple_snapshot.py similarity index 75% rename from simple_snapshot.py rename to v1_8_0/simple_snapshot.py index a726aa7..3317dca 100755 --- a/simple_snapshot.py +++ b/v1_8_0/simple_snapshot.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import group, database +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.web import group, database group_name = "Dev Copies" database_name = "Employee DB - Dev" @@ -20,4 +20,4 @@ database_reference = obj.reference break -database.sync(server_session, database_reference) \ No newline at end of file +database.sync(server_session, database_reference) diff --git a/snapshot_group.py b/v1_8_0/snapshot_group.py similarity index 70% rename from snapshot_group.py rename to v1_8_0/snapshot_group.py index 17f1ce3..203da2b 100755 --- a/snapshot_group.py +++ b/v1_8_0/snapshot_group.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import group, database -from delphixpy.v1_6_0 import job_context +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.web import group, database +from delphixpy.v1_8_0 import job_context group_name = "Dev Copies" #database_name = "Employee DB - Dev" @@ -18,4 +18,4 @@ with job_context.async(server_session): for obj in database_objs: - database.sync(server_session, obj.reference) \ No newline at end of file + database.sync(server_session, obj.reference) diff --git a/test_dx_authorization.py b/v1_8_0/test_dx_authorization.py similarity index 100% rename from test_dx_authorization.py rename to v1_8_0/test_dx_authorization.py diff --git a/test_dx_operation.py b/v1_8_0/test_dx_operation.py similarity index 100% rename from test_dx_operation.py rename to v1_8_0/test_dx_operation.py diff --git a/test_js_bookmarks.py b/v1_8_0/test_js_bookmarks.py similarity index 100% rename from test_js_bookmarks.py rename to v1_8_0/test_js_bookmarks.py diff --git a/test_js_branches.py b/v1_8_0/test_js_branches.py similarity index 100% rename from test_js_branches.py rename to v1_8_0/test_js_branches.py diff --git a/test_js_containers.py b/v1_8_0/test_js_containers.py similarity index 100% rename from test_js_containers.py rename to v1_8_0/test_js_containers.py diff --git a/test_js_templates.py b/v1_8_0/test_js_templates.py 
similarity index 100% rename from test_js_templates.py rename to v1_8_0/test_js_templates.py diff --git a/trigger_replication.py b/v1_8_0/trigger_replication.py similarity index 89% rename from trigger_replication.py rename to v1_8_0/trigger_replication.py index 939a2a4..451239f 100755 --- a/trigger_replication.py +++ b/v1_8_0/trigger_replication.py @@ -3,9 +3,9 @@ VERSION="v.0.0.002" #just a quick and dirty example of executing a replication profile -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import replication -from delphixpy.v1_6_0.web.vo import ReplicationSpec +from delphixpy.v1_8_0.delphix_engine import DelphixEngine +from delphixpy.v1_8_0.web import replication +from delphixpy.v1_8_0.web.vo import ReplicationSpec engine_address = "192.168.218.177" engine_username = "delphix_admin" @@ -53,4 +53,4 @@ def find_obj_by_name(server, f_class, obj_name): replication.spec.execute(server, replication_spec.reference) -print replication_profile_name + " executed." \ No newline at end of file +print replication_profile_name + " executed." From 177252096e89f6a4272643b23a253f0a4e3c41e4 Mon Sep 17 00:00:00 2001 From: Corey Brune Date: Sun, 17 Sep 2017 09:47:38 -0500 Subject: [PATCH 2/6] Adding API v1.8.2 --- v1_8_2/add_windows_env.py | 38 + v1_8_2/delphix_admin_setup.py | 168 +++ v1_8_2/delphix_snapshot_group_will_plugin.py | 31 + v1_8_2/delphix_will_plugin.py | 96 ++ v1_8_2/dx_authorization.py | 447 +++++++ v1_8_2/dx_database.py | 311 +++++ v1_8_2/dx_delete_vdb.py | 573 +++++++++ v1_8_2/dx_environment.py | 697 +++++++++++ v1_8_2/dx_groups.py | 363 ++++++ v1_8_2/dx_jetstream_container.py | 588 +++++++++ v1_8_2/dx_jobs.py | 367 ++++++ v1_8_2/dx_operations.py | 415 +++++++ v1_8_2/dx_operations_vdb.py | 424 +++++++ v1_8_2/dx_operations_vdb_orig.py | 431 +++++++ v1_8_2/dx_provision_dsource.py | 623 ++++++++++ v1_8_2/dx_provision_vdb.py | 1140 ++++++++++++++++++ v1_8_2/dx_refresh_db.py | 905 ++++++++++++++ v1_8_2/dx_replication.py | 421 +++++++ v1_8_2/dx_rewind_vdb.py | 386 ++++++ v1_8_2/dx_skel.py | 315 +++++ v1_8_2/dx_snapshot_db.py | 619 ++++++++++ v1_8_2/dx_update_env.py | 326 +++++ v1_8_2/dx_users.py | 440 +++++++ v1_8_2/dxtools.conf | 13 + v1_8_2/engine_network_assignment.py | 249 ++++ v1_8_2/engine_setup.py | 203 ++++ v1_8_2/find_missing_archivelogs.py | 339 ++++++ v1_8_2/get_engine_pub_key.py | 169 +++ v1_8_2/js_bookmark.py | 512 ++++++++ v1_8_2/js_branch.py | 465 +++++++ v1_8_2/js_container.py | 608 ++++++++++ v1_8_2/js_template.py | 413 +++++++ v1_8_2/lib/DlpxException.py | 14 + v1_8_2/lib/DxLogging.py | 63 + v1_8_2/lib/DxTimeflow.py | 313 +++++ v1_8_2/lib/GetReferences.py | 288 +++++ v1_8_2/lib/GetSession.py | 169 +++ v1_8_2/lib/__init__.py | 5 + v1_8_2/list_all_databases.py | 15 + v1_8_2/simple_snapshot.py | 23 + v1_8_2/snapshot_group.py | 21 + v1_8_2/test_dx_authorization.py | 80 ++ v1_8_2/test_dx_operation.py | 44 + v1_8_2/test_js_bookmarks.py | 63 + v1_8_2/test_js_branches.py | 69 ++ v1_8_2/test_js_containers.py | 89 ++ v1_8_2/test_js_templates.py | 51 + v1_8_2/trigger_replication.py | 56 + 48 files changed, 14458 insertions(+) create mode 100755 v1_8_2/add_windows_env.py create mode 100755 v1_8_2/delphix_admin_setup.py create mode 100755 v1_8_2/delphix_snapshot_group_will_plugin.py create mode 100755 v1_8_2/delphix_will_plugin.py create mode 100755 v1_8_2/dx_authorization.py create mode 100755 v1_8_2/dx_database.py create mode 100755 v1_8_2/dx_delete_vdb.py create mode 100755 v1_8_2/dx_environment.py create mode 100755 v1_8_2/dx_groups.py create 
mode 100755 v1_8_2/dx_jetstream_container.py create mode 100755 v1_8_2/dx_jobs.py create mode 100755 v1_8_2/dx_operations.py create mode 100755 v1_8_2/dx_operations_vdb.py create mode 100755 v1_8_2/dx_operations_vdb_orig.py create mode 100755 v1_8_2/dx_provision_dsource.py create mode 100755 v1_8_2/dx_provision_vdb.py create mode 100755 v1_8_2/dx_refresh_db.py create mode 100755 v1_8_2/dx_replication.py create mode 100755 v1_8_2/dx_rewind_vdb.py create mode 100755 v1_8_2/dx_skel.py create mode 100755 v1_8_2/dx_snapshot_db.py create mode 100755 v1_8_2/dx_update_env.py create mode 100755 v1_8_2/dx_users.py create mode 100755 v1_8_2/dxtools.conf create mode 100755 v1_8_2/engine_network_assignment.py create mode 100755 v1_8_2/engine_setup.py create mode 100755 v1_8_2/find_missing_archivelogs.py create mode 100755 v1_8_2/get_engine_pub_key.py create mode 100755 v1_8_2/js_bookmark.py create mode 100755 v1_8_2/js_branch.py create mode 100755 v1_8_2/js_container.py create mode 100755 v1_8_2/js_template.py create mode 100644 v1_8_2/lib/DlpxException.py create mode 100644 v1_8_2/lib/DxLogging.py create mode 100644 v1_8_2/lib/DxTimeflow.py create mode 100644 v1_8_2/lib/GetReferences.py create mode 100644 v1_8_2/lib/GetSession.py create mode 100644 v1_8_2/lib/__init__.py create mode 100755 v1_8_2/list_all_databases.py create mode 100755 v1_8_2/simple_snapshot.py create mode 100755 v1_8_2/snapshot_group.py create mode 100755 v1_8_2/test_dx_authorization.py create mode 100755 v1_8_2/test_dx_operation.py create mode 100755 v1_8_2/test_js_bookmarks.py create mode 100755 v1_8_2/test_js_branches.py create mode 100755 v1_8_2/test_js_containers.py create mode 100755 v1_8_2/test_js_templates.py create mode 100755 v1_8_2/trigger_replication.py diff --git a/v1_8_2/add_windows_env.py b/v1_8_2/add_windows_env.py new file mode 100755 index 0000000..0081c32 --- /dev/null +++ b/v1_8_2/add_windows_env.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +#Adam Bowen Sept 2016 +VERSION="v.0.0.001" +#just a quick and dirty example of adding a windows source + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.web import environment +from delphixpy.web.vo import HostEnvironmentCreateParameters, EnvironmentUser, PasswordCredential, \ + WindowsHostEnvironment, WindowsHostCreateParameters, WindowsHost + +engine_address = "192.168.2.37" +engine_username = "delphix_admin" +engine_password = "landshark" + +def serversess(f_engine_address, f_engine_username, f_engine_password): + """ + Function to setup the session with the Delphix Engine + """ + server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") + return server_session + +server = serversess(engine_address, engine_username, engine_password) + +envCreateParams = HostEnvironmentCreateParameters() + + +envCreateParams.primary_user = EnvironmentUser() +envCreateParams.primary_user.name = "delphix\delphix_admin" +envCreateParams.primary_user.credential = PasswordCredential() +envCreateParams.primary_user.credential.password = "delphix" +envCreateParams.host_environment = WindowsHostEnvironment() +envCreateParams.host_environment.name = "WINDOWSSOURCE" +envCreateParams.host_environment.proxy = "WINDOWS_HOST-6" #This is the Host ID of the Windows Server that houses the connector +envCreateParams.host_parameters = WindowsHostCreateParameters() +envCreateParams.host_parameters.host = WindowsHost() +envCreateParams.host_parameters.host.address = "WINDOWSSOURCE" + +environment.create(server, envCreateParams) diff --git 
a/v1_8_2/delphix_admin_setup.py b/v1_8_2/delphix_admin_setup.py new file mode 100755 index 0000000..225437f --- /dev/null +++ b/v1_8_2/delphix_admin_setup.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python +''' +Adam Bowen - Jan 2016 +This script configures the delphix_admin user after domain0 is configured +Will come back and properly throw this with logging, etc +''' +VERSION="v.2.3.002" +CONTENTDIR="/u02/app/content" + +import getopt +import logging +from os.path import basename +import signal +import sys +import time +import traceback +import untangle + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError, JobError +from delphixpy.web import user +from delphixpy.web.vo import CredentialUpdateParameters, PasswordCredential, User + + +def serversess(f_engine_address, f_engine_username, f_engine_password): + ''' + Function to grab the server session + ''' + server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") + return server_session + +def help(): + print("\n" + basename(__file__)+ " [-e ] [-o - Engine must be up, unconfigured, and console screen must be green") + print("-o - will use this password to initially access the system") + print("-p - will set the delphix_admin user to this password") + print("-v - Print version information and exit") + sys.exit(2) + +def logging_est(): + ''' + Establish Logging + ''' + global debug + logging.basicConfig(filename='landshark_setup.log',format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + print_info("Welcome to " + basename(__file__) + ", version " + VERSION) + global logger + debug = True + logger = logging.getLogger() + logger.setLevel(10) + print_info("Debug Logging is enabled.") + +def on_exit(sig, func=None): + print_info("Shutdown Command Received") + print_info("Shutting down prime_setup.py") + sys.exit(0) + +def print_debug(print_obj): + ''' + DEBUG Log-level + ''' + if debug == True: + print "DEBUG: " + str(print_obj) + logging.debug(str(print_obj)) + +def print_error(print_obj): + ''' + ERROR Log-level + ''' + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + +def print_info(print_obj): + ''' + INFO Log-level + ''' + print "INFO: " + str(print_obj) + logging.info(str(print_obj)) + +def print_warning(print_obj): + ''' + WARNING Log-level + ''' + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + +def set_exit_handler(func): + signal.signal(signal.SIGTERM, func) + +def time_elapsed(): + elapsed_minutes = round((time.time() - time_start)/60, +1) + return elapsed_minutes + +def version(): + print("Version: " +VERSION) + logging_est() + set_exit_handler(on_exit) + sys.exit(1) + +def main(argv): + try: + logging_est() + global time_start + time_start = time.time() + engine_ip = "" + engine_pass = "" + old_engine_pass = "" + try: + opts,args = getopt.getopt(argv,"e:o:p:hv") + except getopt.GetoptError: + help() + for opt, arg in opts: + if opt == '-h': + help() + elif opt == '-e': + engine_ip = arg + elif opt == '-o': + old_engine_pass = arg + elif opt == '-p': + engine_pass = arg + elif opt == '-v': + version() + + if (engine_ip == "" or engine_pass == "" or old_engine_pass == "") : + help() + + server = serversess(engine_ip, "delphix_admin", old_engine_pass) + + if user.get(server, "USER-2").email_address == None: + print_debug("Setting delphix_admin's email address") + delphix_admin_user = User() + delphix_admin_user.email_address = "spam@delphix.com" + user.update(server, 'USER-2', 
delphix_admin_user) + + print_debug("Setting delphix_admin's password") + delphix_admin_credupdate = CredentialUpdateParameters() + delphix_admin_credupdate.new_credential = PasswordCredential() + delphix_admin_credupdate.new_credential.password = engine_pass + user.update_credential(server, 'USER-2', delphix_admin_credupdate) + else: + print_info("The delphix_admin user has already been setup") + + except SystemExit as e: + sys.exit(e) + except HttpError as e: + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + except KeyboardInterrupt: + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + except: + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/v1_8_2/delphix_snapshot_group_will_plugin.py b/v1_8_2/delphix_snapshot_group_will_plugin.py new file mode 100755 index 0000000..769448f --- /dev/null +++ b/v1_8_2/delphix_snapshot_group_will_plugin.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +#For use with HipChat and Will +#https://github.com/skoczen/will + +from will.plugin import WillPlugin +from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.web import group, database +from delphixpy import job_context + +class DelphixSnapshotPlugin(WillPlugin): + + @respond_to("snapshot_group (?P<v_object>.*)") + def snapshot_group_will(self, message, v_object=None): + group_name = v_object + #database_name = "Employee DB - Dev" + + server_session = DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") + + all_groups = group.get_all(server_session) + + for each in all_groups: + if group_name == each.name: + group_reference = each.reference + break + + database_objs = database.get_all(server_session, group=group_reference) + + with job_context.async(server_session): + for obj in database_objs: + database.sync(server_session, obj.reference) diff --git a/v1_8_2/delphix_will_plugin.py b/v1_8_2/delphix_will_plugin.py new file mode 100755 index 0000000..ff42545 --- /dev/null +++ b/v1_8_2/delphix_will_plugin.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python +#For use with HipChat and Will +#https://github.com/skoczen/will + +from will.plugin import WillPlugin +from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.web import database +import imp, subprocess, shlex + +VERSION=0.001 + + + +class DelphixPlugin(WillPlugin): + + @respond_to("listvdbs") + def list_databases_will(self, message): + foo = imp.load_source('list_all_databases', 'delphixpy-examples/list_all_databases.py') + vdblist="\n".join(each.name for each in foo.all_databases) + will_response = "There are " + str(len(foo.all_databases)) + " databases in the LandsharkEngine\n" + vdblist + self.reply(message, will_response) + + @respond_to("snapshot (?P<v_object>.*)") + def
snapshot_databases_will(self, message, v_object=None): + if " in " not in v_object: + will_response="Please specify group with request. For example:\n \ + snapshot Employee Oracle 11G DB in Sources" + self.reply(message, will_response) + else: + v_object = v_object.split(' in ',1) + vdb_name = v_object[0] + vdb_group = v_object[1] + self.reply(message, "Snapping " + vdb_name + ". Will let you know when it is complete.") + p = subprocess.Popen(['python', 'delphixpy-examples/dx_snapshot_db.py', '--group', vdb_group, '--name', \ + vdb_name, '--config', 'delphixpy-examples/dxtools.conf'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + self.reply(message, vdb_name + " Snapshot Complete\n" + p.stdout.read()) + + @respond_to("provision vdb (?P<v_object>.*)") + def provision_databases_will(self, message, v_object=None): + provision_parameters = shlex.split('python delphixpy-examples/dx_provision_vdb.py --config delphixpy-examples/dxtools.conf ' + v_object) + self.reply(message, str(provision_parameters)) + self.reply(message, "Executing provision job") + p = subprocess.Popen(provision_parameters, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + self.reply(message, "Provision Request Complete\n" + p.stdout.read()) + + @respond_to("delete vdb (?P<v_object>.*)") + def delete_databases_will(self, message, v_object=None): + if " in " not in v_object: + will_response="Please specify group with request. For example:\n \ + delete Employee Oracle 11G DB in Sources" + self.reply(message, will_response) + else: + v_object = v_object.split(' in ',1) + vdb_name = v_object[0] + vdb_group = v_object[1] + self.reply(message, "Deleting " + vdb_name + ". Will let you know when it is complete.") + p = subprocess.Popen(['python', 'delphixpy-examples/dx_delete_vdb.py', '--group', vdb_group, '--name', \ + vdb_name, '--config', 'delphixpy-examples/dxtools.conf'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + self.reply(message, vdb_name + " Delete Complete\n" + p.stdout.read()) + + @respond_to("refresh vdb (?P<v_object>.*)") + def refresh_vdbs_will(self, message, v_object=None): + if " in " not in v_object: + will_response="Please specify group with request. For example:\n \ + refresh autoprod in Analytics" + self.reply(message, will_response) + else: + v_object = v_object.split(' in ',1) + vdb_name = v_object[0] + vdb_group = v_object[1] + self.reply(message, "Refreshing " + vdb_name + ". Will let you know when it is complete.") + p = subprocess.Popen(['python', 'delphixpy-examples/dx_refresh_db.py', '--group', vdb_group, '--name', \ + vdb_name, '--config', 'delphixpy-examples/dxtools.conf', '--timestamp', '@2016-10-14T20:55:05.995Z'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + self.reply(message, vdb_name + " Refresh Complete\n" + p.stdout.read()) + + @respond_to("refresh jetstream (?P<v_object>.*)") + def refresh_jetstream_will(self, message, v_object=None): + if " in " not in v_object: + will_response="Please specify group with request. For example:\n \ + refresh jetstream Sugar Automated Testing Container in Masked SugarCRM Application" + self.reply(message, will_response) + else: + v_object = v_object.split(' in ',1) + container_name = v_object[0] + container_template = v_object[1] + self.reply(message, "Refreshing Jetstream Container: " + container_name + ".
Will let you know when it is complete.") + p = subprocess.Popen(['python', 'delphixpy-examples/dx_jetstream_container.py', '--operation', 'refresh', \ + '--template', container_template, '--container', container_name, '--config', 'delphixpy-examples/dxtools.conf'], \ + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + self.reply(message, container_name + " Refresh Complete\n" + p.stdout.read()) + + @respond_to("bonjour") + def say_bonjour_will(self, message): + """bonjour: Landshark parles the Francais!""" + self.reply(message, "bonjour! Je m'appelle Landshark! Je suis pret a travailler!") diff --git a/v1_8_2/dx_authorization.py b/v1_8_2/dx_authorization.py new file mode 100755 index 0000000..9672083 --- /dev/null +++ b/v1_8_2/dx_authorization.py @@ -0,0 +1,447 @@ +#!/usr/bin/env python +# Corey Brune - Oct 2016 +# Creates an authorization object +# requirements +# pip install docopt delphixpy + +# The below doc follows the POSIX compliant standards and allows us to use +# this doc to also define our arguments for the script. +"""List, create or remove authorizations for a Virtualization Engine +Usage: + dx_authorization.py (--create --role --target_type --target --user | --list | --delete --role --target_type --target --user ) + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_authorization.py -h | --help | -v | --version +List, delete and create authentication objects + +Examples: + dx_authorization.py --engine landsharkengine --create --role Data --user dev_user --target_type database --target test_vdb + dx_authorization.py --engine landsharkengine --create --role Data --user dev_user --target_type group --target Sources + dx_authorization.py --list + dx_authorization.py --delete --role Data --user dev_user --target_type database --target test_vdb + +Options: + --create Create an authorization + --role Role for authorization. Valid Roles are Data, + Read, Jet Stream User, OWNER, PROVISIONER + --target Target object for authorization + --target_type Target type. Valid target types are snapshot, + group, database + --user User for the authorization + --list List all authorizations + --delete Delete authorization + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_authorization.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION = 'v.0.0.015' + +from docopt import docopt +from os.path import basename +import sys +from time import sleep, time +import traceback + +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.exceptions import HttpError +from delphixpy.web import database +from delphixpy.web import job +from delphixpy.web import role +from delphixpy.web import authorization +from delphixpy.web import user +from delphixpy.web import snapshot +from delphixpy.web import group +from delphixpy.web.vo import User +from delphixpy.web.vo import Authorization + +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_obj_by_name +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug +from lib.DxLogging import print_exception + + +def create_authorization(dlpx_obj, role_name, target_type, target_name, + user_name): + """ + Function to start, stop, enable or disable a VDB + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param role_name: Name of the role + :param target_type: Supports snapshot, group and database target types + :param target_name: Name of the target + :param user_name: User for the authorization + """ + + authorization_obj = Authorization() + print_debug('Searching for {}, {} and {} references.\n'.format( + role_name, target_name, user_name)) + try: + authorization_obj.role = find_obj_by_name(dlpx_obj.server_session, role, + role_name).reference + authorization_obj.target = find_target_type(dlpx_obj, target_type, + target_name).reference + authorization_obj.user = find_obj_by_name(dlpx_obj.server_session, user, + user_name).reference + authorization.create(dlpx_obj.server_session, authorization_obj) + except (RequestError, HttpError, JobError) as e: + print_exception('An error occurred while creating authorization:\n' + '{}'.format(e)) + print 'Authorization successfully created for {}.'.format(user_name) + + +def delete_authorization(dlpx_obj, role_name, target_type, target_name, + user_name): + """ + Function to delete a given authorization + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param role_name: Name of the role + :type role_name: basestring + :param target_type: Supports snapshot, group and database target types + :type target_type basestring + :param target_name: Name of the target + :type target_name: basestring + :param user_name: User for the authorization + :type user_name: basestring + """ + target_obj = find_target_type(dlpx_obj, target_type, target_name) + user_obj = find_obj_by_name(dlpx_obj.server_session, user, + user_name) + role_obj = find_obj_by_name(dlpx_obj.server_session, role, + role_name) + auth_objs = authorization.get_all(dlpx_obj.server_session) + + try: + + del_auth_str = '({}, {}, {})'.format(user_obj.reference, + role_obj.reference, + target_obj.reference) + for auth_obj in auth_objs: + if auth_obj.name == del_auth_str: + authorization.delete(dlpx_obj.server_session, + auth_obj.reference) + except DlpxException as e: + print_exception('ERROR: Could not delete authorization:\n{}'.format(e)) + print '{} for user {} was deleted successfully'.format(target_name, + user_name) + + +def find_target_type(dlpx_obj, target_type, target_name): + """ + Function to find the target authorization + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: 
lib.GetSession.GetSession + :param target_type: Type of target for authorization + :param target_name: Name of the target + """ + + target_obj = None + try: + if target_type.lower() == 'group': + target_obj = find_obj_by_name(dlpx_obj.server_session, group, + target_name) + elif target_type.lower() == 'database': + target_obj = find_obj_by_name(dlpx_obj.server_session, database, + target_name) + elif target_type.lower() == 'snapshot': + target_obj = find_obj_by_name(dlpx_obj.server_session, snapshot, + target_name) + except (DlpxException, RequestError, HttpError) as e: + print_exception('Could not find authorization target type ' + '{}:\n{}'.format(target_type, e)) + return target_obj + + +def list_authorization(dlpx_obj): + """ + Function to list authorizations for a given engine + + :param dlpx_obj: Virtualization Engine session object + """ + target_obj = None + + try: + auth_objs = authorization.get_all(dlpx_obj.server_session) + print_info('User, Role, Target, Reference') + for auth_obj in auth_objs: + role_obj = role.get(dlpx_obj.server_session, auth_obj.role) + user_obj = user.get(dlpx_obj.server_session, auth_obj.user) + if auth_obj.target.startswith('USER'): + target_obj = user.get(dlpx_obj.server_session, auth_obj.target) + elif auth_obj.target.startswith('GROUP'): + target_obj = group.get(dlpx_obj.server_session, auth_obj.target) + elif auth_obj.target.startswith('DOMAIN'): + target_obj = User() + target_obj.name = 'DOMAIN' + print '{}, {}, {}, {}'.format(user_obj.name, role_obj.name, + target_obj.name, + auth_obj.reference) + except (RequestError, HttpError, JobError, AttributeError) as e: + print_exception('An error occurred while listing authorizations.:\n' + '{}\n'.format((e))) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine, dlpx_obj): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. 
+ This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + :type engine: dict + dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + + """ + + try: + # Setup the connection to the Delphix Engine + dlpx_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + except DlpxException as e: + print_exception('ERROR: js_bookmark encountered an error authenticating' + ' to {} {}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + thingstodo = ["thingtodo"] + try: + with dlpx_obj.job_mode(single_thread): + while (len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0): + if len(thingstodo) > 0: + if arguments['--create']: + create_authorization(dlpx_obj, arguments['--role'], + arguments['--target_type'], + arguments['--target'], + arguments['--user']) + elif arguments['--delete']: + delete_authorization(dlpx_obj, arguments['--role'], + arguments['--target_type'], + arguments['--target'], + arguments['--user']) + elif arguments['--list']: + list_authorization(dlpx_obj) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dlpx_obj.jobs.keys(): + job_obj = job.get(dlpx_obj.server_session, + dlpx_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: : {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the running jobs list. + del dlpx_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dlpx_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + except (DlpxException, RequestError, JobError, HttpError) as e: + print_exception('\nError in dx_authorization: {}\n{}'.format( + engine['hostname'], e)) + sys.exit(1) + + +def run_job(dlpx_obj, config_file_path): + """ + This function runs the main_workflow aynchronously against all the + servers specified + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param config_file_path: string containing path to configuration file. + :type config_file_path: str + """ + + # Create an empty list to store threads we create. + threads = [] + engine = None + + # If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info('Executing against all Delphix Engines in the dxtools.conf') + try: + # For each server in the dxtools.conf... + for delphix_engine in dlpx_obj.dlpx_engines: + engine = dlpx_obj.dlpx_engines[delphix_engine] + # Create a new thread and add it to the list. + threads.append(main_workflow(engine, dlpx_obj)) + except DlpxException as e: + print_exception('Error encountered in run_job():\n{}'.format(e)) + sys.exit(1) + + elif arguments['--all'] is False: + # Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dlpx_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + except (DlpxException, RequestError, KeyError): + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value and' + ' try again. 
Exiting.\n'.format( + arguments['--engine'], config_file_path)) + else: + # Else search for a default engine in the dxtools.conf + for delphix_engine in dlpx_obj.dlpx_engines: + if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': + engine = dlpx_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) + break + + if engine is None: + raise DlpxException('\nERROR: No default engine found. Exiting') + + # run the job against the engine + threads.append(main_workflow(engine, dlpx_obj)) + + # For each thread in the list... + for each in threads: + # join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(time_start): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + + :param time_start: float containing start time of the script. + """ + return round((time() - time_start)/60, +1) + + +def main(): + # We want to be able to call on these variables anywhere in the script. + global single_thread + global debug + + time_start = time() + single_thread = False + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + config_file_path = arguments['--config'] + # Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + # This is the function that will handle processing main_workflow for + # all the servers. + run_job(dx_session_obj, config_file_path) + + elapsed_minutes = time_elapsed(time_start) + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + # Here we handle what we do when the unexpected happens + except SystemExit as e: + # This is what we use to handle our sys.exit(#) + sys.exit(e) + + except DlpxException as e: + # We use this exception handler when an error occurs in a function call. + print_exception('ERROR: Please check the ERROR message below:\n' + '{}'.format(e.message)) + sys.exit(2) + + except HttpError as e: + # We use this exception handler when our connection to Delphix fails + print_exception('ERROR: Connection failed to the Delphix Engine. 
Please' + 'check the ERROR message below:\n{}'.format(e.message)) + sys.exit(2) + + except JobError as e: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) + elapsed_minutes = time_elapsed(time_start) + print_exception('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(3) + + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + print_debug('You sent a CTRL+C to interrupt the process') + elapsed_minutes = time_elapsed(time_start) + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + except: + # Everything else gets caught here + print_exception('{}\n{}'.format(sys.exc_info()[0], + traceback.format_exc())) + elapsed_minutes = time_elapsed(time_start) + print_info("{} took {:.2f} minutes to get this far".format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + + +if __name__ == "__main__": + # Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + # Feed our arguments to the main function, and off we go! + main() diff --git a/v1_8_2/dx_database.py b/v1_8_2/dx_database.py new file mode 100755 index 0000000..5216052 --- /dev/null +++ b/v1_8_2/dx_database.py @@ -0,0 +1,311 @@ +#!/usr/bin/env python +# Corey Brune - Feb 2017 +#Description: +# This is a skeleton script which has all of the common functionality. +# The developer will only need to add the necessary arguments and functions +# then make the function calls in main_workflow(). +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""Description +Usage: + dx_skel.py () + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_skel.py -h | --help | -v | --version +Description + +Examples: + + +Options: + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.0.000' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import job + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetSession import GetSession + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... 
+ t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + if arguments['--vdb']: + #Get the database reference we are copying from the database name + database_obj = find_obj_by_name(dx_session_obj.server_session, + database, arguments['--vdb']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dx_session_obj.job_mode(single_thread): + while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo) > 0: + if OPERATION: + method_call + + elif OPERATION: + method_call + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dx_session_obj.jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, + dx_session_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Replication operations: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the + # running jobs list. + del dx_session_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dx_session_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + except (HttpError, RequestError, JobError, DlpxException) as e: + print_exception('ERROR: Could not complete replication ' + 'operation:{}'.format(e)) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n{}'.format(e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + (arguments['--engine']))) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. 
Exiting.\n'.format( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + #elapsed_minutes = round((time() - time_start)/60, +1) + #return elapsed_minutes + return round((time() - time_start)/60, +1) + + +def main(arguments): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job() + + elapsed_minutes = time_elapsed() + print_info('script took {:d} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message:\n{}'.format(e)) + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far\n{}'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! 
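+    # Control flow recap: main() sets up logging and the GetSession object,
+    # parses dxtools.conf, then calls run_job(), which dispatches
+    # main_workflow() against every engine (--all), a named --engine, or the
+    # dxtools.conf entry whose "default" field is "true".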
+ main(arguments) diff --git a/v1_8_2/dx_delete_vdb.py b/v1_8_2/dx_delete_vdb.py new file mode 100755 index 0000000..7584940 --- /dev/null +++ b/v1_8_2/dx_delete_vdb.py @@ -0,0 +1,573 @@ +#!/usr/bin/env python +#Adam Bowen - Apr 2016 +#This script deletes a vdb +#requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. This thing is brilliant. +"""Delete a VDB + +Usage: + dx_delete_db.py (--group [--name ] | --all_dbs ) + [-d | --engine | --all] + [--usebackup] [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_delete_db.py (--host [--group ] [--object_type ] + | --object_type [--group ] [--host ] ) + [-d | --engine | --all] + [--usebackup] [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_delete_db.py -h | --help | -v | --version + +Delete a VDB + +Examples: + dx_delete_db.py --group "Sources" --object_type dsource --usebackup + dx_delete_db.py --name "Employee Oracle 11G DB" + dx_delete_db.py --host LINUXSOURCE --parallel 2 --usebackup + dx_delete_db.py --host LINUXSOURCE --parallel 4 --usebackup --debug -d landsharkengine + + + +Options: + -d Identifier of Delphix engine in dxtools.conf. + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --all_dbs Run against all database objects + --name Name of object in Delphix to execute against. + --group Name of group in Delphix to execute against. + --host Name of environment in Delphix to execute against. + --object_type dsource or vdb. + --usebackup Snapshot using "Most Recent backup". + Available for MSSQL and ASE only. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_snapshot_db.log] + -h --help Show this screen. + -v --version Show version. + +""" + +VERSION="v.0.0.001" + + +from docopt import docopt +import logging +from os.path import basename +import signal +import sys +import time +import traceback +import json + +from multiprocessing import Process +from time import sleep, time + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError, JobError +from delphixpy import job_context +from delphixpy.web import database, environment, group, job, source, user +from delphixpy.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters + + +def find_obj_by_name(engine, server, f_class, obj_name): + """ + Function to find objects by name and object class, and return object's reference as a string + You might use this function to find objects like groups. 
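+    For example, to look up a group object by name (the group name here is
+    only an illustration):
+        group_obj = find_obj_by_name(engine, server, group, "Sources")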
+ """ + print_debug(engine["hostname"] + ": Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"") + obj_ref = '' + + all_objs = f_class.get_all(server) + for obj in all_objs: + if obj.name == obj_name: + print_debug(engine["hostname"] + ": Found a match " + str(obj.reference)) + return obj + +def find_all_databases_by_group_name(engine, server, group_name, exclude_js_container=False): + """ + Easy way to quickly find databases by group name + """ + + #First search groups for the name specified and return its reference + group_obj = find_obj_by_name(engine, server, group, group_name) + if group_obj: + databases=database.get_all(server, group=group_obj.reference, no_js_container_data_source=exclude_js_container) + return databases + +def find_database_by_name_and_group_name(engine, server, group_name, database_name): + + databases = find_all_databases_by_group_name(engine, server, group_name) + + for each in databases: + if each.name == database_name: + print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) + return each + print_info("Unable to find \"" + database_name + "\" in " + group_name) + +def find_source_by_database(engine, server, database_obj): + #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. + source_obj = source.get_all(server, database=database_obj.reference) + #We'll just do a little sanity check here to ensure we only have a 1:1 result. + if len(source_obj) == 0: + print_error(engine["hostname"] + ": Did not find a source for " + database_obj.name + ". Exiting") + sys.exit(1) + elif len(source_obj) > 1: + print_error(engine["hostname"] + ": More than one source returned for " + database_obj.name + ". Exiting") + print_error(source_obj) + sys.exit(1) + return source_obj + +def get_config(config_file_path): + """ + This function reads in the dxtools.conf file + """ + #First test to see that the file is there and we can open it + try: + config_file = open(config_file_path).read() + except: + print_error("Was unable to open " + config_file_path + ". Please check the path and permissions, then try again.") + sys.exit(1) + #Now parse the file contents as json and turn them into a python dictionary, throw an error if it isn't proper json + try: + config = json.loads(config_file) + except: + print_error("Was unable to read " + config_file_path + " as json. 
Please check file in a json formatter and try again.") + sys.exit(1) + #Create a dictionary of engines (removing the data node from the dxtools.json, for easier parsing) + delphix_engines = {} + for each in config['data']: + delphix_engines[each['hostname']] = each + print_debug(delphix_engines) + return delphix_engines + +def logging_est(logfile_path): + """ + Establish Logging + """ + global debug + logging.basicConfig(filename=logfile_path,format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + print_info("Welcome to " + basename(__file__) + ", version " + VERSION) + global logger + debug = arguments['--debug'] + logger = logging.getLogger() + if debug == True: + logger.setLevel(10) + print_info("Debug Logging is enabled.") + +def job_mode(server): + """ + This function tells Delphix how to execute jobs, based on the single_thread variable at the beginning of the file + """ + #Synchronously (one at a time) + if single_thread == True: + job_m = job_context.sync(server) + print_debug("These jobs will be executed synchronously") + #Or asynchronously + else: + job_m = job_context.async(server) + print_debug("These jobs will be executed asynchronously") + return job_m + +def job_wait(): + """ + This job stops all work in the thread/process until jobs are completed. + """ + #Grab all the jos on the server (the last 25, be default) + all_jobs = job.get_all(server) + #For each job in the list, check to see if it is running (not ended) + for jobobj in all_jobs: + if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): + print_debug("Waiting for " + jobobj.reference + " (currently: " + jobobj.job_state+ ") to finish running against the container") + #If so, wait + job_context.wait(server,jobobj.reference) + +def on_exit(sig, func=None): + """ + This function helps us end cleanly and with exit codes + """ + print_info("Shutdown Command Received") + print_info("Shutting down " + basename(__file__)) + sys.exit(0) + +def print_debug(print_obj): + """ + Call this function with a log message to prefix the message with DEBUG + """ + try: + if debug == True: + print "DEBUG: " + str(print_obj) + logging.debug(str(print_obj)) + except: + pass + +def print_error(print_obj): + """ + Call this function with a log message to prefix the message with ERROR + """ + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + +def print_info(print_obj): + """ + Call this function with a log message to prefix the message with INFO + """ + print "INFO: " + str(print_obj) + logging.info(str(print_obj)) + +def print_warning(print_obj): + """ + Call this function with a log message to prefix the message with WARNING + """ + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + +def serversess(f_engine_address, f_engine_username, f_engine_password): + """ + Function to setup the session with the Delphix Engine + """ + server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") + return server_session + +def set_exit_handler(func): + """ + This function helps us set the correct exit code + """ + signal.signal(signal.SIGTERM, func) + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... 
+ t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + +@run_async +def main_workflow(engine): + """ + This function is where the main workflow resides. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + """ + + #Pull out the values from the dictionary for this engine + engine_address = engine["ip_address"] + engine_username = engine["username"] + engine_password = engine["password"] + #Establish these variables as empty for use later + databases = [] + environment_obj = None + source_objs = None + jobs = {} + + + #Setup the connection to the Delphix Engine + server = serversess(engine_address, engine_username, engine_password) + + #If an environment/server was specified + if host_name: + print_debug(engine["hostname"] + ": Getting environment for " + host_name) + #Get the environment object by the hostname + environment_obj = find_obj_by_name(engine, server, environment, host_name) + if environment_obj != None: + #Get all the sources running on the server + env_source_objs = source.get_all(server, environment=environment_obj.reference) + #If the server doesn't have any objects, exit. + if env_source_objs == None: + print_error(host_name + "does not have any objects. Exiting") + sys.exit(1) + #If we are only filtering by the server, then put those objects in the main list for processing + if not(arguments['--group'] and database_name): + source_objs = env_source_objs + all_dbs = database.get_all(server, no_js_container_data_source=False) + databases = [] + for source_obj in source_objs: + if source_obj.staging == False and source_obj.virtual == True: + database_obj = database.get(server, source_obj.container) + if database_obj in all_dbs: + databases.append(database_obj) + else: + print_error(engine["hostname"] + ":No environment found for " + host_name + ". Exiting") + sys.exit(1) + #If we specified a specific database by name.... + if arguments['--name']: + #Get the database object from the name + database_obj = find_database_by_name_and_group_name(engine, server, arguments['--group'], arguments['--name']) + if database_obj: + databases.append(database_obj) + #Else if we specified a group to filter by.... + elif arguments['--group']: + print_debug(engine["hostname"] + ":Getting databases in group " + arguments['--group']) + #Get all the database objects in a group. + databases = find_all_databases_by_group_name(engine, server, arguments['--group']) + #Else, if we said all vdbs ... + elif arguments['--all_dbs'] and not arguments['--host'] : + #Grab all databases + databases = database.get_all(server, no_js_container_data_source=False) + elif arguments['--object_type'] and not arguments['--host'] : + databases = database.get_all(server) + if not databases or len(databases) == 0: + print_error("No databases found with the criterion specified") + return + #reset the running job count before we begin + i = 0 + with job_mode(server): + #While there are still running jobs or databases still to process.... 
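+        # Job throttling: the inner loop below dispatches deletes until the
+        # database list is empty or the running-job count reaches --parallel
+        # (when given); update_jobs_dictionary() then recounts the running
+        # jobs and the loop sleeps --poll seconds before checking again.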
+ while (len(jobs) > 0 or len(databases) > 0): + #While there are databases still to process and we are still under + #the max simultaneous jobs threshold (if specified) + while len(databases) > 0 and (arguments['--parallel'] == None or i < int(arguments['--parallel'])): + #Give us the next database in the list, and remove it from the list + database_obj = databases.pop() + #Get the source of the database. + #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. + source_obj = find_source_by_database(engine, server, database_obj) + #If we applied the environment/server filter AND group filter, find the intersecting matches + if environment_obj != None and (arguments['--group']): + match = False + for env_source_obj in env_source_objs: + if source_obj[0].reference in env_source_obj.reference: + match = True + break + if match == False: + print_error(engine["hostname"] + ": " + database_obj.name + " does not exist on " + host_name + ". Exiting") + return + #Snapshot the database + delete_job = delete_database(engine, server, jobs, source_obj[0], database_obj, arguments['--object_type']) + #If delete_job has any value, then we know that a job was initiated. + if delete_job: + #increment the running job count + i += 1 + #Check to see if we are running at max parallel processes, and report if so. + if ( arguments['--parallel'] != None and i >= int(arguments['--parallel'])): + print_info(engine["hostname"] + ": Max jobs reached (" + str(i) + ")") + #reset the running jobs counter, as we are about to update the count from the jobs report. + i = update_jobs_dictionary(engine, server, jobs) + print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + str(len(databases)) + " jobs waiting to run") + #If we have running jobs, pause before repeating the checks. + if len(jobs) > 0: + sleep(float(arguments['--poll'])) + +def run_job(engine): + """ + This function runs the main_workflow aynchronously against all the servers specified + """ + #Create an empty list to store threads we create. + threads = [] + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + #For each server in the dxtools.conf... + for delphix_engine in dxtools_objects: + engine = dxtools_objects[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + else: + #Else if the --engine argument was given, test to see if the engine exists in dxtools.conf + if arguments['--engine']: + try: + engine = dxtools_objects[arguments['--engine']] + print_info("Executing against Delphix Engine: " + arguments['--engine']) + except: + print_error("Delphix Engine \"" + arguments['--engine'] + "\" cannot be found in " + config_file_path) + print_error("Please check your value and try again. Exiting") + sys.exit(1) + #Else if the -d argument was given, test to see if the engine exists in dxtools.conf + elif arguments['-d']: + try: + engine = dxtools_objects[arguments['-d']] + print_info("Executing against Delphix Engine: " + arguments['-d']) + except: + print_error("Delphix Engine \"" + arguments['-d'] + "\" cannot be found in " + config_file_path) + print_error("Please check your value and try again. 
Exiting") + sys.exit(1) + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dxtools_objects: + if dxtools_objects[delphix_engine]['default'] == 'true': + engine = dxtools_objects[delphix_engine] + print_info("Executing against the default Delphix Engine in the dxtools.conf: " + dxtools_objects[delphix_engine]['hostname']) + break + if engine == None: + print_error("No default engine found. Exiting") + sys.exit(1) + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete before moving on + each.join() + +def delete_database(engine, server, jobs, source_obj, container_obj, obj_type=None): + """ + This function + FYI - Snapshot is also called sync + """ + #Sanity check to make sure our source object has a reference + if source_obj.reference != None : + #If we specified the --object_type flag, ensure this source is a match. Skip, if not. + if obj_type != None and ((obj_type.lower() == "vdb" and source_obj.virtual != True ) or (obj_type.lower() == "dsource" and source_obj.virtual != False )): + print_warning(engine["hostname"] + ": " + container_obj.name + " is not a " + obj_type.lower() + ". Skipping sync") + #Ensure this source is not a staging database. We can't act upon those. + elif source_obj.staging == True: + print_warning(engine["hostname"] + ": " + container_obj.name + " is a staging database. Skipping.") + #Ensure the source is enabled. We can't snapshot disabled databases. + else: + print_info(engine["hostname"] + ": Deleting " + container_obj.name ) + print_debug(engine["hostname"] + ": Type: " + source_obj.type ) + print_debug(engine["hostname"] + ": " +source_obj.type) + #Delete it + database.delete(server, container_obj.reference) + #Add the job into the jobs dictionary so we can track its progress + jobs[container_obj] = server.last_job + #return the job object to the calling statement so that we can tell if a job was created or not (will return None, if no job) + return server.last_job + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + +def update_jobs_dictionary(engine, server, jobs): + """ + This function checks each job in the dictionary and updates its status or removes it if the job is complete. + Return the number of jobs still running. + """ + #Establish the running jobs counter, as we are about to update the count from the jobs report. + i = 0 + #get all the jobs, then inspect them + for j in jobs.keys(): + job_obj = job.get(server, jobs[j]) + print_debug(engine["hostname"] + ": " + str(job_obj)) + print_info(engine["hostname"] + ": " + j.name + ": " + job_obj.job_state) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the running jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running job count. + i += 1 + return i + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global host_name + global database_name + global config_file_path + global dxtools_objects + + + + try: + #Declare globals that will be used throughout the script. 
+ logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + usebackup = arguments['--usebackup'] + database_name = arguments['--name'] + host_name = arguments['--host'] + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dxtools_objects = get_config(config_file_path) + + #This is the function that will handle processing main_workflow for all the servers. + run_job(engine) + + elapsed_minutes = time_elapsed() + print_info("script took " + str(elapsed_minutes) + " minutes to get this far.") + + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that we have actionable data + """ + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(3) + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + except: + """ + Everything else gets caught here + """ + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #I added this below condition to account for my --name | or AT LEAST ONE OF --group --host --object_type + #I couldn't quite sort it out with docopt. Maybe I'm just dense today. + #Anyway, if none of the four options are given, print the __doc__ and exit. + if not(arguments['--name']) and not(arguments['--group']) and not(arguments['--host']) and not(arguments['--object_type']) and not(arguments['--all_dbs']): + print(__doc__) + sys.exit() + #Feed our arguments to the main function, and off we go! + print arguments + main(arguments) diff --git a/v1_8_2/dx_environment.py b/v1_8_2/dx_environment.py new file mode 100755 index 0000000..8cb987d --- /dev/null +++ b/v1_8_2/dx_environment.py @@ -0,0 +1,697 @@ +#!/usr/bin/env python +#Corey Brune 08 2016 +#This script creates an environment +#requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. + +"""Create Host Environment + +Usage: + dx_environment.py (--type --env_name --host_user \ +--ip
[--toolkit ] [--ase --ase_user --ase_pw ] \ +|--update_ase_pw --env_name | --update_ase_user --env_name \ +| --delete | --refresh | --list) +[--logdir ][--debug] [--config ] [--connector_name ] +[--pw ][--engine ][--all] [--poll ] + dx_environment.py (--update_host --old_host_address --new_host_address ) [--logdir ][--debug] [--config ] + dx_environment.py ([--enable]|[--disable]) --env_name [--logdir ][--debug] [--config ] + dx_environment.py -h | --help | -v | --version + +Create a Delphix environment. (current support for standalone environments only) + +Examples: + dx_environment.py --engine landsharkengine --type linux --env_name test1 --host_user delphix --pw delphix --ip 182.1.1.1 --toolkit /var/opt/delphix + dx_environment.py --type linux --env_name test1 --update_ase_pw newPasswd + dx_environment.py --type linux --env_name test1 --host_user delphix --pw delphix --ip 182.1.1.1 --toolkit /var/opt/delphix + dx_environment.py --update_host --host_name 10.0.3.60 + dx_environment.py --type linux --env_name test1 --host_user delphix --pw delphix --ip 182.1.1.1 --toolkit /var/opt/delphix --ase --ase_user sa --ase_pw delphixpw + dx_environment.py --type windows --env_name SOURCE --host_user delphix.local\\administrator --ip 10.0.1.50 --toolkit foo --config dxtools.conf --pw 'myTempPassword123!' --debug --connector_name 10.0.1.60 + dx_environment.py --enable --env_name SOURCE + dx_environment.py --disable --env_name SOURCE + dx_environment.py --list + +Options: + --type The OS type for the environment + --env_name The name of the Delphix environment + --ip The IP address of the Delphix environment + --list List all of the environments for a given engine + --toolkit Path of the toolkit. Required for Unix/Linux + --host_user The username on the Delphix environment + --delete The name of the Delphix environment to delete + --update_ase_pw The new ASE DB password + --refresh The name of the Delphix environment to refresh. Specify "all" to refresh all environments + --pw Password of the user + --connector_name The name of the Delphix connector to use. Required for Windows source environments + --update_ase_user Update the ASE DB username + --ase Flag to enable ASE environments + --ase_user The ASE DB username + --ase_pw Password of the ASE DB user + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --engine Identifier of Delphix engine in dxtools.conf. + + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_environment.log] + -h --help Show this screen. + -v --version Show version. + --update_host Update the host address for an environment + --old_host_address The current name of the host, as registered in Delphix. Required for update_host + --new_host_address The desired name of the host, as registered in Delphix. 
Required for update_host + --enable Enable the named environment + --disable Disable the named environment + +""" + +VERSION="v.0.3.608" + +from docopt import docopt +from os.path import basename +import sys +import traceback +from time import sleep, time + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import environment +from delphixpy.web import job +from delphixpy.web import host +from delphixpy.web.vo import UnixHostEnvironment +from delphixpy.web.vo import ASEHostEnvironmentParameters +from delphixpy.web.vo import HostEnvironmentCreateParameters +from delphixpy.web.vo import WindowsHostEnvironment +from delphixpy.web.vo import WindowsHost +from delphixpy.web.vo import UnixHost + +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_obj_name +from lib.GetReferences import find_all_objects +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug +from lib.DxLogging import print_exception + +def enable_environment(env_name): + """ + Enable the given host + """ + env_obj = find_obj_by_name(dx_session_obj.server_session, + environment, env_name) + + try: + environment.enable(dx_session_obj.server_session,env_obj.reference) + print('Attempting to enable {}'.format(env_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Enabling the host {} ' + 'encountered an error:\n{}'.format(env_name, e)) + sys.exit(1) + +def disable_environment(env_name): + """ + Enable the given host + """ + env_obj = find_obj_by_name(dx_session_obj.server_session, + environment, env_name) + + try: + environment.disable(dx_session_obj.server_session,env_obj.reference) + print('Attempting to disable {}'.format(env_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Disabling the host {} ' + 'encountered an error:\n{}'.format(env_name, e)) + sys.exit(1) + +def update_host_address(old_host_address, new_host_address): + """ + Update the given host + """ + + old_host_obj = find_obj_by_name(dx_session_obj.server_session, + host, old_host_address) + if old_host_obj.type == "WindowsHost": + host_obj = WindowsHost() + else: + host_obj = UnixHost() + host_obj.address = new_host_address + try: + host.update(dx_session_obj.server_session, old_host_obj.reference, host_obj) + + print('Attempting to update {} to {}'.format(old_host_address, new_host_address)) + + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Updating the host {} ' + 'encountered an error:\n{}'.format(env_name, e)) + sys.exit(1) + +def list_env(): + """ + List all environments for a given engine + """ + + all_envs = environment.get_all(dx_session_obj.server_session) + for env in all_envs: + env_user = find_obj_name(dx_session_obj.server_session, + environment.user, env.primary_user) + env_host = find_obj_name(dx_session_obj.server_session, + host, env.host) + + #ORACLE CLUSTER does not have env.host + #Windows does not have ASE instances + + print 'Environment Name: {}, Username: {}, Host: {}, Enabled: {}, ' \ + 'ASE Environment Params: {}'.format( + env.name, env_user, env_host, env.enabled, + env.ase_host_environment_parameters if + isinstance(env.ase_host_environment_parameters, + ASEHostEnvironmentParameters) else 'Undefined') + + +def delete_env(engine, env_name): + """ + Deletes an environment + + engine: 
Dictionary of engines + env_name: Name of the environment to delete + """ + + env_obj = find_obj_by_name(dx_session_obj.server_session, environment, + env_name) + + if env_obj: + environment.delete(dx_session_obj.server_session, env_obj.reference) + dx_session_obj.jobs[engine['hostname']] = \ + dx_session_obj.server_session.last_job + + elif env_obj is None: + print('Environment was not found in the Engine: {}'.format(env_name)) + sys.exit(1) + + +def refresh_env(engine, env_name): + """ + Refresh the environment + + engine: Dictionary of engines + env_name: Name of the environment to refresh + """ + + if env_name == "all": + env_list = find_all_objects(dx_session_obj.server_session, environment) + for env_obj in env_list: + try: + environment.refresh(dx_session_obj.server_session, env_obj.reference) + dx_session_obj.jobs[engine['hostname']] = \ + dx_session_obj.server_session.last_job + + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Refreshing the environment {} ' + 'encountered an error:\n{}'.format(env_name, e)) + sys.exit(1) + else: + + try: + env_obj = find_obj_by_name(dx_session_obj.server_session, environment, + env_name) + + environment.refresh(dx_session_obj.server_session, env_obj.reference) + dx_session_obj.jobs[engine['hostname']] = \ + dx_session_obj.server_session.last_job + + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Refreshing the environment {} ' + 'encountered an error:\n{}'.format(env_name, e)) + sys.exit(1) + + +def update_ase_username(): + """ + Update the ASE database user password + """ + + env_obj = UnixHostEnvironment() + env_obj.ase_host_environment_parameters = ASEHostEnvironmentParameters() + env_obj.ase_host_environment_parameters.db_user = \ + arguments['--update_ase_user'] + + try: + environment.update(dx_session_obj.server_session, find_obj_by_name( + dx_session_obj.server_session, environment, arguments['--env_name'], + env_obj).reference, env_obj) + + except (HttpError, RequestError) as e: + print_exception('\nERROR: Updating the ASE DB password ' + 'failed:\n{}\n'.format(e)) + + +def update_ase_pw(): + """ + Update the ASE database user password + """ + + env_obj = UnixHostEnvironment() + env_obj.ase_host_environment_parameters = ASEHostEnvironmentParameters() + env_obj.ase_host_environment_parameters.credentials = {'type': + 'PasswordCredential', + 'password': + arguments['--update_ase_pw']} + + try: + environment.update(dx_session_obj.server_session, find_obj_by_name( + dx_session_obj.server_session, environment, arguments['--env_name'], + env_obj).reference, env_obj) + + except (HttpError, RequestError) as e: + print_exception('\nERROR: Updating the ASE DB password ' + 'failed:\n{}\n'.format(e)) + + +def create_linux_env(engine, env_name, host_user, ip_addr, toolkit_path, + pw=None): + + """ + Create a Linux environment. + + env_name: The name of the environment + host_user: The server account used to authenticate + ip_addr: DNS name or IP address of the environment + toolkit_path: Path to the toolkit. Note: This directory must be + writable by the host_user + pw: Password of the user. 
Default: None (use SSH keys instead) + """ + + env_params_obj = HostEnvironmentCreateParameters() + + if pw is None: + print_debug('Creating the environment with SSH Keys',debug) + env_params_obj.primary_user = {'type': 'EnvironmentUser', + 'name': host_user, + 'credential': { + 'type': 'SystemKeyCredential'}} + + else: + print_debug('Creating the environment with a password',debug) + env_params_obj.primary_user = {'type': 'EnvironmentUser', + 'name': host_user, + 'credential': { + 'type': 'PasswordCredential', + 'password': pw }} + + env_params_obj.host_parameters = {'type': 'UnixHostCreateParameters', + 'host': { 'address': ip_addr, + 'type': 'UnixHost', + 'name': env_name, + 'toolkitPath': toolkit_path}} + + env_params_obj.host_environment = UnixHostEnvironment() + env_params_obj.host_environment.name = env_name + + if arguments['--ase']: + env_params_obj.host_environment.ase_host_environment_parameters = \ + ASEHostEnvironmentParameters() + + try: + env_params_obj.host_environment.ase_host_environment_parameters.db_user = \ + arguments['--ase_user'] + env_params_obj.host_environment.ase_host_environment_parameters.credentials = { + 'type': 'PasswordCredential', + 'password': arguments['--ase_pw']} + except KeyError: + print_exception('The --ase_user and --ase_pw arguments are' + ' required with the --ase flag.\n') + + try: + environment.create(dx_session_obj.server_session, + env_params_obj) + dx_session_obj.jobs[engine['hostname']] = \ + dx_session_obj.server_session.last_job + + except (DlpxException, RequestError, HttpError) as e: + print('\nERROR: Encountered an exception while creating the ' + 'environment:\n{}'.format(e)) + except JobError as e: + print_exception('JobError while creating environment {}:\n{}'.format( + e, e.message)) + + +def create_windows_env(engine, env_name, host_user, ip_addr, + pw=None, connector_name=None): + + """ + Create a Windows environment. + + env_name: The name of the environment + host_user: The server account used to authenticate + ip_addr: DNS name or IP address of the environment + toolkit_path: Path to the toolkit. Note: This directory must be + writable by the host_user + pw: Password of the user. 
Default: None (use SSH keys instead) + """ + + env_params_obj = HostEnvironmentCreateParameters() + + print_debug('Creating the environment with a password',debug) + + env_params_obj.primary_user = {'type': 'EnvironmentUser', + 'name': host_user, + 'credential': { + 'type': 'PasswordCredential', + 'password': pw }} + + env_params_obj.host_parameters = {'type': 'WindowsHostCreateParameters', + 'host': { 'address': ip_addr, + 'type': 'WindowsHost', + 'name': env_name, + 'connectorPort': 9100}} + + env_params_obj.host_environment = WindowsHostEnvironment() + env_params_obj.host_environment.name = env_name + + if connector_name: + env_obj = find_obj_by_name(dx_session_obj.server_session, environment, + connector_name) + + if env_obj: + env_params_obj.host_environment.proxy = env_obj.host + elif env_obj is None: + print('Host was not found in the Engine: {}'.format(arguments[--connector_name])) + sys.exit(1) + + try: + environment.create(dx_session_obj.server_session, + env_params_obj) + dx_session_obj.jobs[engine['hostname']] = \ + dx_session_obj.server_session.last_job + + except (DlpxException, RequestError, HttpError) as e: + print('\nERROR: Encountered an exception while creating the ' + 'environment:\n{}'.format(e)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. 
+    This allows us to run against multiple Delphix Engines simultaneously
+
+    """
+
+    try:
+        #Setup the connection to the Delphix Engine
+        dx_session_obj.serversess(engine['ip_address'], engine['username'],
+                                  engine['password'])
+
+    except DlpxException as e:
+        print_exception('\nERROR: Engine {} encountered an error while '
+                        'establishing the session:\n{}\n'.format(
+                            engine['hostname'], e))
+        sys.exit(1)
+
+    thingstodo = ['thingtodo']
+
+    try:
+        with dx_session_obj.job_mode(single_thread):
+            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
+                if len(thingstodo) > 0:
+
+                    if arguments['--type'] == 'linux' or arguments['--type'] == 'windows':
+                        env_name = arguments['--env_name']
+                        host_user = arguments['--host_user']
+                        pw = arguments['--pw']
+                        ip_addr = arguments['--ip']
+                        host_name = arguments['--connector_name']
+                        if arguments['--type'] == 'linux':
+                            toolkit_path = arguments['--toolkit']
+                            create_linux_env(engine, env_name, host_user,
+                                             ip_addr, toolkit_path, pw)
+                        else:
+                            create_windows_env(engine, env_name, host_user,
+                                               ip_addr, pw, host_name)
+
+                    elif arguments['--delete']:
+                        delete_env(engine, arguments['--delete'])
+
+                    elif arguments['--refresh']:
+                        refresh_env(engine, arguments['--refresh'])
+
+                    elif arguments['--update_ase_pw']:
+                        update_ase_pw()
+
+                    elif arguments['--update_ase_user']:
+                        update_ase_username()
+                    elif arguments['--list']:
+                        list_env()
+                    elif arguments['--update_host']:
+                        update_host_address(arguments['--old_host_address'],
+                                            arguments['--new_host_address'])
+                    elif arguments['--enable']:
+                        enable_environment(arguments['--env_name'])
+                    elif arguments['--disable']:
+                        disable_environment(arguments['--env_name'])
+
+                    thingstodo.pop()
+
+                #get all the jobs, then inspect them
+                i = 0
+                for j in dx_session_obj.jobs.keys():
+                    job_obj = job.get(dx_session_obj.server_session,
+                                      dx_session_obj.jobs[j])
+                    print_debug(job_obj, debug)
+                    print_info('{}: Environment: {}'.format(
+                        engine['hostname'], job_obj.job_state))
+
+                    if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
+                        #If the job is in a non-running state, remove it from
+                        # the running jobs list.
+                        del dx_session_obj.jobs[j]
+                    else:
+                        #If the job is in a running state, increment the
+                        # running job count.
+                        i += 1
+                print_info('{}: {:d} jobs running.\n'.format(
+                    engine['hostname'], i))
+
+                #If we have running jobs, pause before repeating the checks.
+                if len(dx_session_obj.jobs) > 0:
+                    sleep(float(arguments['--poll']))
+
+    except (DlpxException, JobError) as e:
+        print_exception('\nError while creating the environment {}:'
+                        '\n{}'.format(arguments['--env_name'], e.message))
+        sys.exit(1)
+
+
+def run_job():
+    """
+    This function runs the main_workflow asynchronously against all the
+    servers specified
+    """
+    engine = None
+
+    #Create an empty list to store threads we create.
+    threads = []
+
+    #If the --all argument was given, run against every engine in dxtools.conf
+    if arguments['--all']:
+        print_info('Executing against all Delphix Engines in the dxtools.conf')
+
+        try:
+            #For each server in the dxtools.conf...
+            for delphix_engine in dx_session_obj.dlpx_engines:
+                engine = dx_session_obj.dlpx_engines[delphix_engine]
+                #Create a new thread and add it to the list.
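For reference, the two credential payloads that create_linux_env() builds above differ only in the credential type. The standalone sketch below reproduces those shapes outside the script; the helper name, user name, address, and toolkit path are hypothetical placeholders, and the vo import path is assumed to match the imports used elsewhere in this script.

from delphixpy.web.vo import HostEnvironmentCreateParameters, UnixHostEnvironment

def build_unix_env_params(env_name, host_user, ip_addr, toolkit_path, pw=None):
    # Build the same payload create_linux_env() assembles above: SSH keys
    # when no password is given, otherwise a password credential.
    params = HostEnvironmentCreateParameters()
    credential = ({'type': 'SystemKeyCredential'} if pw is None else
                  {'type': 'PasswordCredential', 'password': pw})
    params.primary_user = {'type': 'EnvironmentUser', 'name': host_user,
                           'credential': credential}
    params.host_parameters = {'type': 'UnixHostCreateParameters',
                              'host': {'type': 'UnixHost', 'address': ip_addr,
                                       'name': env_name,
                                       'toolkitPath': toolkit_path}}
    params.host_environment = UnixHostEnvironment()
    params.host_environment.name = env_name
    return params

# Hypothetical values; pass the result to environment.create() as above.
params = build_unix_env_params('linuxtarget', 'delphix', '10.0.1.20', '/u01/toolkit')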
+ threads.append(main_workflow(engine)) + + except DlpxException as e: + print_exception('Error encountered in run_job():\n{}'.format(e)) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + + except (DlpxException, KeyError) as e: + print_exception('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. Exiting.\n{}\n'.format( + arguments['--engine'], config_file_path, e)) + + else: + #Else search for a default engine in the dxtools.conf + #import pdb;pdb.set_trace() + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine is None: + print_exception('\nERROR: No default engine found. Exiting') + sys.exit(1) + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + + +def main(argv): + global single_thread + global usebackup + global time_start + global config_file_path + global dx_session_obj + global debug + + try: + dx_session_obj = GetSession() + debug = arguments['--debug'] + logging_est(arguments['--logdir'], debug) + print_debug(arguments, debug) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + + print_info('Welcome to %s version %s' % (basename(__file__), + VERSION)) + + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job() + + elapsed_minutes = time_elapsed() + print_info('script took %s minutes to get this far. ' % + (str(elapsed_minutes))) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except DlpxException as e: + """ + We use this exception handler when an error occurs in a function call. + """ + + print('\nERROR: Please check the ERROR message below:\n%s' % + (e.message)) + sys.exit(2) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print('\nERROR: Connection failed to the Delphix Engine. 
Please ' + 'check the ERROR message below:\n%s' % (e.message)) + sys.exit(2) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that we + have actionable data + """ + print('A job failed in the Delphix Engine:\n%s' % (e.job)) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + + " minutes to get this far.") + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process",debug) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + + " minutes to get this far.") + + except: + """ + Everything else gets caught here + """ + print(sys.exc_info()[0]) + print(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far' % (basename(__file__), + str(elapsed_minutes))) + sys.exit(1) + + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_groups.py b/v1_8_2/dx_groups.py new file mode 100755 index 0000000..dbb75c7 --- /dev/null +++ b/v1_8_2/dx_groups.py @@ -0,0 +1,363 @@ +#!/usr/bin/env python +# Adam Bowen - Aug 2017 +#Description: +# This script will allow you to easily manage groups in Delphix +# +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""Description +Usage: + dx_groups.py (--group_name [--add | --delete]) + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_groups.py (--list) + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_groups.py -h | --help | -v | --version +Description + +Examples: + dx_groups.py --debug --config delphixpy-examples/dxtools_1.conf --group_name Test --add + dx_groups.py --config delphixpy-examples/dxtools_1.conf --group_name Test --delete + dx_groups.py --list + +Options: + --group_name The name of the group + --add Add the identified group + --delete Delete the identified group + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_skel.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION = 'v.0.0.001' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import job +from delphixpy.web import group +from delphixpy.web.vo import Group + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_all_objects +from lib.GetSession import GetSession + +def add_group(group_name): + """ + This function adds the group + """ + group_obj = Group() + group_obj.name = group_name + + + try: + group.create(dx_session_obj.server_session,group_obj) + print('Attempting to create {}'.format(group_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Creating the group {} ' + 'encountered an error:\n{}'.format(group_name, e)) + sys.exit(1) + +def delete_group(group_name): + """ + This function adds the group + """ + group_obj = find_obj_by_name(dx_session_obj.server_session, + group, group_name) + + + try: + group.delete(dx_session_obj.server_session,group_obj.reference) + print('Attempting to delete {}'.format(group_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Deleting the group {} ' + 'encountered an error:\n{}'.format(group_name, e)) + sys.exit(1) + +def list_groups(): + """ + This function lists all groups + """ + group_list = find_all_objects(dx_session_obj.server_session, group) + + for group_obj in group_list: + print('Group: {}'.format(group_obj.name)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. 
+    This allows us to run against multiple Delphix Engines simultaneously
+
+    engine: Dictionary of engines
+    """
+    try:
+        #Setup the connection to the Delphix Engine
+        dx_session_obj.serversess(engine['ip_address'], engine['username'],
+                                  engine['password'])
+
+    except DlpxException as e:
+        print_exception('\nERROR: Engine {} encountered an error while '
+                        'establishing the session:\n{}\n'.format(
+                            engine['hostname'], e))
+        sys.exit(1)
+
+    thingstodo = ["thingtodo"]
+    try:
+        with dx_session_obj.job_mode(single_thread):
+            while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0:
+                if len(thingstodo) > 0:
+                    if arguments['--add']:
+                        add_group(arguments['--group_name'])
+                    elif arguments['--delete']:
+                        delete_group(arguments['--group_name'])
+                    elif arguments['--list']:
+                        list_groups()
+                    thingstodo.pop()
+                # get all the jobs, then inspect them
+                i = 0
+                for j in dx_session_obj.jobs.keys():
+                    job_obj = job.get(dx_session_obj.server_session,
+                                      dx_session_obj.jobs[j])
+                    print_debug(job_obj)
+                    print_info('{}: Group: {}'.format(
+                        engine['hostname'], job_obj.job_state))
+                    if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
+                        # If the job is in a non-running state, remove it
+                        # from the running jobs list.
+                        del dx_session_obj.jobs[j]
+                    elif job_obj.job_state in 'RUNNING':
+                        # If the job is in a running state, increment the
+                        # running job count.
+                        i += 1
+                print_info('{}: {:d} jobs running.'.format(
+                    engine['hostname'], i))
+                # If we have running jobs, pause before repeating the
+                # checks.
+                if len(dx_session_obj.jobs) > 0:
+                    sleep(float(arguments['--poll']))
+
+    except (HttpError, RequestError, JobError, DlpxException) as e:
+        print_exception('ERROR: Could not complete group '
+                        'operation: {}'.format(e))
+
+
+def run_job():
+    """
+    This function runs the main_workflow asynchronously against all the
+    servers specified
+    """
+    #Create an empty list to store threads we create.
+    threads = []
+    engine = None
+
+    #If the --all argument was given, run against every engine in dxtools.conf
+    if arguments['--all']:
+        print_info("Executing against all Delphix Engines in the dxtools.conf")
+
+        try:
+            #For each server in the dxtools.conf...
+            for delphix_engine in dx_session_obj.dlpx_engines:
+                engine = dx_session_obj.dlpx_engines[delphix_engine]
+                #Create a new thread and add it to the list.
+                threads.append(main_workflow(engine))
+
+        except DlpxException as e:
+            print_exception('Error encountered in run_job():\n{}'.format(e))
+            sys.exit(1)
+
+    elif arguments['--all'] is False:
+        #Else if the --engine argument was given, test to see if the engine
+        # exists in dxtools.conf
+        if arguments['--engine']:
+            try:
+                engine = dx_session_obj.dlpx_engines[arguments['--engine']]
+                print_info('Executing against Delphix Engine: {}\n'.format(
+                    arguments['--engine']))
+
+            except (DlpxException, RequestError, KeyError) as e:
+                raise DlpxException('\nERROR: Delphix Engine {} cannot be '
+                                    'found in {}. Please check your value '
+                                    'and try again. Exiting.\n{}'.format(
+                                        arguments['--engine'],
+                                        config_file_path, e))
+
+        else:
+            #Else search for a default engine in the dxtools.conf
+            for delphix_engine in dx_session_obj.dlpx_engines:
+                if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \
+                    'true':
+
+                    engine = dx_session_obj.dlpx_engines[delphix_engine]
+                    print_info('Executing against the default Delphix Engine '
+                               'in the dxtools.conf: {}'.format(
+                                   dx_session_obj.dlpx_engines[delphix_engine]['hostname']))
+
+                    break
+
+        if engine is None:
+            raise DlpxException("\nERROR: No default engine found.
Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + #elapsed_minutes = round((time() - time_start)/60, +1) + #return elapsed_minutes + return round((time() - time_start)/60, +1) + + +def main(arguments): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job() + + elapsed_minutes = time_elapsed() + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except DlpxException as e: + print_exception('script encountered an error while processing the' + 'config file:\n{}'.format(e)) + + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message:\n{}'.format(e)) + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far\n{}'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_jetstream_container.py b/v1_8_2/dx_jetstream_container.py new file mode 100755 index 0000000..af6ff98 --- /dev/null +++ b/v1_8_2/dx_jetstream_container.py @@ -0,0 +1,588 @@ +#!/usr/bin/env python +#Adam Bowen - Jun 2016 +#dx_jetstream_container.py +#Use this file as a starter for your python scripts, if you like +#requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. 
This thing is brilliant. +"""Perform routine operations on Jetstream containers + +Usage: + dx_jetstream_container.py --template (--container | --all_containers ) + --operation [-d | --engine | --all] + [--bookmark_name ] [--bookmark_tags ] [--bookmark_shared ] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_jetstream_container.py -h | --help | -v | --version + +Perform routine operations on a Jetstream Container + +Examples: + dx_jetstream_container.py --operation refresh --template "Masked SugarCRM Application" --container "Sugar Automated Testing Container" + dx_jetstream_container.py --operation reset --template "Masked SugarCRM Application" --all_containers + dx_jetstream_container.py --template "Masked SugarCRM Application" --container "Sugar Automated Testing Container" --operation bookmark --bookmark_name "Testing" --bookmark_tags "one,two,three" --bookmark_shared true + +Options: + -d Identifier of Delphix engine in dxtools.conf. + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --all_containers Run against all jetstream containers + --template Name of Jetstream template to execute against. + --container Name of Jetstream container to execute against. + --operation Name of the operation to execute + Can be one of: + start, stop, recover, refresh, reset, bookmark + --bookmark_name Name of the bookmark to create + (only valid with "--operation bookmark") + --bookmark_tags Comma-delimited list to tag the bookmark + (only valid with "--operation bookmark") + --bookmark_shared Share bookmark: true/false + [default: false] + --host Name of environment in Delphix to execute against. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_jetstream_container_refresh.log] + -h --help Show this screen. + -v --version Show version. + +""" + +VERSION="v.0.0.005" + + +from docopt import docopt +import logging +from os.path import basename +import signal +import sys +import time +import traceback +import json +import threading + +from multiprocessing import Process +from time import sleep, time + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError, JobError +from delphixpy import job_context +from delphixpy.web import jetstream, job +from delphixpy.web.vo import JSBookmark, JSBookmarkCreateParameters, JSTimelinePointLatestTimeInput + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... 
+ t1.join() + t2.join() + """ + #from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = threading.Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + +@run_async +def container_bookmark(engine, server, container_obj, bookmark_name, bookmark_shared, tags): + '''This function bookmarks the current branch on the container''' + #But first, let's make sure it is in a CONSISTENT state + container_recover(engine, server, container_obj) + #Next let's make sure it is started + container_start(engine, server, container_obj) + #Prepare the bookmark creation parameters + bookmark_create_params = JSBookmarkCreateParameters() + bookmark_create_params.bookmark = JSBookmark() + bookmark_create_params.bookmark.name = bookmark_name + bookmark_create_params.bookmark.branch = container_obj.active_branch + bookmark_create_params.bookmark.shared = bookmark_shared + bookmark_create_params.bookmark.tags = tags + bookmark_create_params.timeline_point_parameters = JSTimelinePointLatestTimeInput() + bookmark_create_params.timeline_point_parameters.source_data_layout = container_obj.reference + + jetstream.bookmark.create(server, bookmark_create_params) + +def container_recover(engine, server, container_obj): + '''This function recovers a container that is in an "INCONSISTENT" state''' + if container_obj.state == "INCONSISTENT": + #if not recover it + job_obj = jetstream.container.recover(server, container_obj.reference) + #wait for the recovery action to finish + job_context.wait(server,job_obj.reference) + #get the updated object with the new state + container_obj = jetstream.container.get(server, container_obj.reference) + return container_obj + +@run_async +def container_recover_async(engine, server, container_obj): + '''This function recovers all specified containers asynchronously''' + container_recover(engine, server, container_obj) + +@run_async +def container_refresh(engine, server, container_obj): + '''This function refreshes a container''' + #But first, let's make sure it is in a CONSISTENT state + container_recover(engine, server, container_obj) + #Next let's make sure it is started + container_start(engine, server, container_obj) + #Now let's refresh it. + refresh_job = jetstream.container.refresh(server, container_obj.reference) + +@run_async +def container_reset(engine, server, container_obj): + '''This function resets a container''' + #But first, let's make sure it is in a CONSISTENT state + container_recover(engine, server, container_obj) + #Next let's make sure it is started + container_start(engine, server, container_obj) + #Now let's refresh it. 
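As a compact recap of what container_bookmark() assembles above, this sketch builds the bookmark parameters for the latest point on a container's active branch. The container object and bookmark name are assumed to come from the lookups defined elsewhere in this file, and the helper name is only illustrative.

from delphixpy.web.vo import (JSBookmark, JSBookmarkCreateParameters,
                              JSTimelinePointLatestTimeInput)

def build_bookmark_params(container_obj, bookmark_name, shared=False, tags=None):
    # Bookmark the latest time on the container's active branch.
    params = JSBookmarkCreateParameters()
    params.bookmark = JSBookmark()
    params.bookmark.name = bookmark_name
    params.bookmark.branch = container_obj.active_branch
    params.bookmark.shared = shared
    params.bookmark.tags = tags or []
    params.timeline_point_parameters = JSTimelinePointLatestTimeInput()
    params.timeline_point_parameters.source_data_layout = container_obj.reference
    return params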
+ reset_job = jetstream.container.reset(server, container_obj.reference) + +def container_start(engine, server, container_obj): + '''This function starts/enables a container that is in an "OFFLINE" state''' + if container_obj.state == "OFFLINE": + #if not, enable it + jetstream.container.enable(server, container_obj.reference) + +@run_async +def container_start_async(engine, server, container_obj): + '''This function starts all specified containers asynchronously''' + container_start(engine, server, container_obj) + +def container_stop(engine, server, container_obj): + '''This function starts/enables a container that is in an "OFFLINE" state''' + if container_obj.state == "ONLINE": + #if not, enable it + jetstream.container.disable(server, container_obj.reference) + +@run_async +def container_stop_async(engine, server, container_obj): + '''This function starts all specified containers asynchronously''' + container_stop(engine, server, container_obj) + +def find_container_by_name_and_template_name(engine, server, container_name, template_name): + template_obj = find_obj_by_name(engine, server, jetstream.template, template_name) + + containers = jetstream.container.get_all(server, template=template_obj.reference) + + for each in containers: + if each.name == container_name: + print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) + return each + print_info("Unable to find \"" + container_name + "\" in " + template_name) + +def find_all_containers_by_template_name(engine, server, template_name): + template_obj = find_obj_by_name(engine, server, jetstream.template, template_name) + + containers = jetstream.container.get_all(server, template=template_obj.reference) + if containers: + for each in containers: + print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) + return containers + print_info("Unable to find \"" + container_name + "\" in " + template_name) + +def find_obj_by_name(engine, server, f_class, obj_name): + """ + Function to find objects by name and object class, and return object's reference as a string + You might use this function to find objects like groups. + """ + print_debug(engine["hostname"] + ": Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"") + obj_ref = '' + + all_objs = f_class.get_all(server) + for obj in all_objs: + if obj.name == obj_name: + print_debug(engine["hostname"] + ": Found a match " + str(obj.reference)) + return obj + +def get_config(config_file_path): + """ + This function reads in the dxtools.conf file + """ + #First test to see that the file is there and we can open it + try: + config_file = open(config_file_path).read() + except: + print_error("Was unable to open " + config_file_path + ". Please check the path and permissions, then try again.") + sys.exit(1) + #Now parse the file contents as json and turn them into a python dictionary, throw an error if it isn't proper json + try: + config = json.loads(config_file) + except: + print_error("Was unable to read " + config_file_path + " as json. 
Please check file in a json formatter and try again.") + sys.exit(1) + #Create a dictionary of engines (removing the data node from the dxtools.json, for easier parsing) + delphix_engines = {} + for each in config['data']: + delphix_engines[each['hostname']] = each + print_debug(delphix_engines) + return delphix_engines + +def logging_est(logfile_path): + """ + Establish Logging + """ + global debug + logging.basicConfig(filename=logfile_path,format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + print_info("Welcome to " + basename(__file__) + ", version " + VERSION) + global logger + debug = arguments['--debug'] + logger = logging.getLogger() + if debug == True: + logger.setLevel(10) + print_info("Debug Logging is enabled.") + +def job_mode(server): + """ + This function tells Delphix how to execute jobs, based on the single_thread variable at the beginning of the file + """ + #Synchronously (one at a time) + if single_thread == True: + job_m = job_context.sync(server) + print_debug("These jobs will be executed synchronously") + #Or asynchronously + else: + job_m = job_context.async(server) + print_debug("These jobs will be executed asynchronously") + return job_m + +def job_wait(server): + """ + This job stops all work in the thread/process until jobs are completed. + """ + #Grab all the jos on the server (the last 25, be default) + all_jobs = job.get_all(server) + #For each job in the list, check to see if it is running (not ended) + for jobobj in all_jobs: + if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): + print_debug("Waiting for " + jobobj.reference + " (currently: " + jobobj.job_state+ ") to finish running against the container") + #If so, wait + job_context.wait(server,jobobj.reference) + +def on_exit(sig, func=None): + """ + This function helps us end cleanly and with exit codes + """ + print_info("Shutdown Command Received") + print_info("Shutting down " + basename(__file__)) + sys.exit(0) + +def print_debug(print_obj): + """ + Call this function with a log message to prefix the message with DEBUG + """ + try: + if debug == True: + print "DEBUG: " + str(print_obj) + logging.debug(str(print_obj)) + except: + pass + +def print_error(print_obj): + """ + Call this function with a log message to prefix the message with ERROR + """ + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + +def print_info(print_obj): + """ + Call this function with a log message to prefix the message with INFO + """ + print "INFO: " + str(print_obj) + logging.info(str(print_obj)) + +def print_warning(print_obj): + """ + Call this function with a log message to prefix the message with WARNING + """ + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + +def serversess(f_engine_address, f_engine_username, f_engine_password): + """ + Function to setup the session with the Delphix Engine + """ + server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") + return server_session + +def set_exit_handler(func): + """ + This function helps us set the correct exit code + """ + signal.signal(signal.SIGTERM, func) + +@run_async +def main_workflow(engine): + """ + This function is where the main workflow resides. + Use the @run_async decorator to run this function asynchronously. 
+ This allows us to run against multiple Delphix Engine simultaneously + """ + + #Pull out the values from the dictionary for this engine + engine_address = engine["ip_address"] + engine_username = engine["username"] + engine_password = engine["password"] + #Establish these variables as empty for use later + containers = [] + jobs = {} + + + #Setup the connection to the Delphix Engine + server = serversess(engine_address, engine_username, engine_password) + + #If we specified a specific database by name.... + if arguments['--container']: + #Get the container object from the name + container_obj = find_container_by_name_and_template_name(engine, server, arguments['--container'], arguments['--template']) + if container_obj: + containers.append(container_obj) + #Else, if we said all containers ... + elif arguments['--all_containers']: + #Grab all containers in the template + containers = find_all_containers_by_template_name(engine, server, arguments['--template']) + if not containers or len(containers) == 0: + print_error("No containers found with the criterion specified") + return + #reset the running job count before we begin + i = 0 + container_threads = [] + #While there are still running jobs or containers still to process.... + while (i > 0 or len(containers) > 0): + #While there are containers still to process and we are still under + #the max simultaneous jobs threshold (if specified) + while len(containers) > 0 and (arguments['--parallel'] == None or i < int(arguments['--parallel'])): + #Give us the next database in the list, and remove it from the list + container_obj = containers.pop() + #what do we want to do? + if arguments['--operation'] == "refresh": + #refresh the container + container_threads.append(container_refresh(engine, server, container_obj)) + elif arguments['--operation'] == "reset": + container_threads.append(container_reset(engine, server, container_obj)) + elif arguments['--operation'] == "start": + container_threads.append(container_start_async(engine, server, container_obj)) + elif arguments['--operation'] == "stop": + container_threads.append(container_stop_async(engine, server, container_obj)) + elif arguments['--operation'] == "recover": + container_threads.append(container_recover_async(engine, server, container_obj)) + elif arguments['--operation'] == "bookmark": + if arguments['--bookmark_tags']: + tags = arguments['--bookmark_tags'].split(',') + else: + tags = [] + if arguments['--bookmark_shared']: + if str(arguments['--bookmark_shared']).lower() == "true": + bookmark_shared = True + elif str(arguments['--bookmark_shared']).lower() == "false": + bookmark_shared = False + else: + print_error("Invalid argument \"" + str(arguments['--bookmark_shared']).lower() + "\" for --bookmark_shared") + print_error("--bookmark_shared only takes a value of true/false.") + print_error("Exiting") + sys.exit(1) + else: + bookmark_shared=False + container_threads.append(container_bookmark(engine, server, container_obj, arguments['--bookmark_name'], bookmark_shared, tags)) + #For each thread in the list... + i = len(container_threads) + #Check to see if we are running at max parallel processes, and report if so. + if ( arguments['--parallel'] != None and i >= int(arguments['--parallel'])): + print_info(engine["hostname"] + ": Max jobs reached (" + str(i) + ")") + #reset the running jobs counter, as we are about to update the count from the jobs report. 
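The --parallel handling above reduces to counting the threads that are still alive and sleeping between polls. Below is a distilled, standalone version of that loop; the worker callable and the default limits are hypothetical.

from threading import Thread
from time import sleep

def throttle(work_items, worker, max_parallel=4, poll=10):
    # Start at most max_parallel workers at a time and poll until all finish.
    running, pending = [], list(work_items)
    while running or pending:
        running = [t for t in running if t.is_alive()]
        while pending and len(running) < max_parallel:
            t = Thread(target=worker, args=(pending.pop(),))
            t.start()
            running.append(t)
        if running:
            sleep(poll)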
+ i=0 + for t in container_threads: + if t.isAlive(): + i+=1 + print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + str(len(containers)) + " jobs waiting to run") + #If we have running jobs, pause before repeating the checks. + if i > 0: + sleep(float(arguments['--poll'])) + print "made it out" + #For each thread in the list... + for each in container_threads: + #join them back together so that we wait for all threads to complete before moving on + each.join() + +def run_job(engine): + """ + This function runs the main_workflow aynchronously against all the servers specified + """ + #Create an empty list to store threads we create. + threads = [] + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + #For each server in the dxtools.conf... + for delphix_engine in dxtools_objects: + engine = dxtools_objects[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + else: + #Else if the --engine argument was given, test to see if the engine exists in dxtools.conf + if arguments['--engine']: + try: + engine = dxtools_objects[arguments['--engine']] + print_info("Executing against Delphix Engine: " + arguments['--engine']) + except: + print_error("Delphix Engine \"" + arguments['--engine'] + "\" cannot be found in " + config_file_path) + print_error("Please check your value and try again. Exiting") + sys.exit(1) + #Else if the -d argument was given, test to see if the engine exists in dxtools.conf + elif arguments['-d']: + try: + engine = dxtools_objects[arguments['-d']] + print_info("Executing against Delphix Engine: " + arguments['-d']) + except: + print_error("Delphix Engine \"" + arguments['-d'] + "\" cannot be found in " + config_file_path) + print_error("Please check your value and try again. Exiting") + sys.exit(1) + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dxtools_objects: + if dxtools_objects[delphix_engine]['default'] == 'true': + engine = dxtools_objects[delphix_engine] + print_info("Executing against the default Delphix Engine in the dxtools.conf: " + dxtools_objects[delphix_engine]['hostname']) + break + if engine == None: + print_error("No default engine found. Exiting") + sys.exit(1) + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete before moving on + each.join() + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + +def update_jobs_dictionary(engine, server, jobs): + """ + This function checks each job in the dictionary and updates its status or removes it if the job is complete. + Return the number of jobs still running. + """ + #Establish the running jobs counter, as we are about to update the count from the jobs report. 
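update_jobs_dictionary() expresses the job bookkeeping used throughout these scripts: fetch each tracked job, drop the ones that reached a terminal state, and count the rest. A minimal sketch of that idiom, assuming server is an established DelphixEngine session and jobs maps names to job references:

from delphixpy.web import job

def prune_finished_jobs(server, jobs):
    # Return the number of tracked jobs still running, removing any that
    # reached a terminal state.
    running = 0
    for key in list(jobs.keys()):
        job_obj = job.get(server, jobs[key])
        if job_obj.job_state in ('CANCELED', 'COMPLETED', 'FAILED'):
            del jobs[key]
        else:
            running += 1
    return running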
+ i = 0 + #get all the jobs, then inspect them + for j in jobs.keys(): + job_obj = job.get(server, jobs[j]) + print_debug(engine["hostname"] + ": " + str(job_obj)) + print_info(engine["hostname"] + ": " + j.name + ": " + job_obj.job_state) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the running jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running job count. + i += 1 + return i + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global dxtools_objects + + try: + #Declare globals that will be used throughout the script. + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dxtools_objects = get_config(config_file_path) + + #This is the function that will handle processing main_workflow for all the servers. + run_job(engine) + + elapsed_minutes = time_elapsed() + print_info("script took " + str(elapsed_minutes) + " minutes to get this far.") + + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that we have actionable data + """ + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(3) + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + except: + """ + Everything else gets caught here + """ + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + #Feed our arguments to the main function, and off we go! + print arguments + main(arguments) diff --git a/v1_8_2/dx_jobs.py b/v1_8_2/dx_jobs.py new file mode 100755 index 0000000..b04eedb --- /dev/null +++ b/v1_8_2/dx_jobs.py @@ -0,0 +1,367 @@ +#!/usr/bin/env python +# Corey Brune - Oct 2016 +#Description: +# List jobs on a given engine +# +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. 
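Like the other scripts in this patch, dx_jobs.py reads its engine definitions from dxtools.conf, the JSON file whose 'data' list get_config() parses above. The snippet below writes a minimal example; the hostname, address, and credentials are placeholders, and a real dxtools.conf may carry additional fields.

import json

# Hypothetical engine entry; 'default' marks the engine used when no
# --engine/-d argument is given.
sample_conf = {
    'data': [
        {'hostname': 'myengine1',
         'ip_address': '10.0.1.10',
         'username': 'delphix_admin',
         'password': 'changeme',
         'default': 'true'}
    ]
}

with open('dxtools.conf', 'w') as conf_file:
    json.dump(sample_conf, conf_file, indent=4)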
+"""List jobs on an engine +Usage: + dx_jobs.py (--list [--state ][--title ]) + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_jobs.py -h | --help | -v | --version + +List jobs on an engine + +Examples: + dx_jobs.py --list --state failed + dx_jobs.py --list --title snapsync + dx_jobs.py --list --state failed --title snapsync + + +Options: + --list List all jobs on an engine. + --title Filter job by title name. Note: The search is case insensitive. + --state Filter jobs by state: RUNNING, SUSPENDED, CANCELED, COMPLETED, FAILED + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.0.002' + +import sys +import re +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import job + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetSession import GetSession + + +def list_jobs(): + + if arguments['--state']: + if re.match('RUNNING|SUSPENDED|CANCELED|COMPLETED|FAILED', + arguments['--state'].upper()): + pass + else: + print_info('The state should be one of these options:\n' + 'RUNNING, SUSPENDED, CANCELED, COMPLETED, FAILED') + sys.exit(1) + + for job_info in job.get_all(dx_session_obj.server_session, + job_state=arguments['--state'].upper()): + + if arguments['--title']: + if re.search(arguments['--title'], job_info.title, + re.IGNORECASE): + print('Action={}, Job State={}, Parent Action State={},' + 'Percent Complete={}, Reference={}, Target={},' + 'Target Name={}, Title={}, User={}\n'.format( + job_info.action_type, job_info.job_state, + job_info.parent_action_state, + job_info.percent_complete, job_info.reference, + job_info.target, job_info.target_name, + job_info.title, job_info.user)) + else: + print('Action=%s, Job State=%s, Parent Action State=%s,' + 'Percent Complete=%s, Reference=%s, Target=%s,' + 'Target Name=%s, Title=%s, User=%s\n' % + (job_info.action_type, job_info.job_state, + job_info.parent_action_state, + job_info.percent_complete, job_info.reference, + job_info.target, job_info.target_name, + job_info.title, job_info.user)) + else: + for job_info in job.get_all(dx_session_obj.server_session): + + if arguments['--title']: + if re.search(arguments['--title'], job_info.title, + re.IGNORECASE): + print('Action=%s, Job State=%s, Parent Action State=%s,' + 'Percent Complete=%s, Reference=%s, Target=%s,' + 'Target Name=%s, Title=%s, User=%s\n' % + (job_info.action_type, job_info.job_state, + job_info.parent_action_state, job_info.percent_complete, + job_info.reference, job_info.target, job_info.target_name, + job_info.title, job_info.user)) + else: + print('Action=%s, Job State=%s, Parent Action State=%s,' + 'Percent Complete=%s, Reference=%s, Target=%s,' + 'Target Name=%s, Title=%s, User=%s\n' % + (job_info.action_type, job_info.job_state, + job_info.parent_action_state, 
job_info.percent_complete, + job_info.reference, job_info.target, + job_info.target_name, job_info.title, job_info.user)) + + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + jobs = {} + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + with dx_session_obj.job_mode(single_thread): + while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0: + if len(thingstodo) > 0: + + if arguments['--list']: + list_jobs() + thingstodo.pop() + + # get all the jobs, then inspect them + i = 0 + for j in dx_session_obj.jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, + dx_session_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Operations: {}'.format(engine['hostname'], + job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it from the + # running jobs list. + del dx_session_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the running + # job count. + i += 1 + + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + + # If we have running jobs, pause before repeating the checks. + if len(dx_session_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n{}'.format(e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: %s\n' % + (arguments['--engine'])) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine %s cannot be ' 'found in %s. 
Please check your value ' + 'and try again. Exiting.\n' % ( + arguments['--engine'], config_file_path)) + + + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: %s' % ( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global database_name + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job() + + elapsed_minutes = time_elapsed() + print_info("script took " + str(elapsed_minutes) + + " minutes to get this far.") + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message below') + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! 
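list_jobs() above filters job.get_all() results by state on the server side and then by a case-insensitive title regex on the client side. The same filtering can be expressed as a small helper; the session object and search strings are assumed to exist.

import re
from delphixpy.web import job

def find_jobs(server, state=None, title=None):
    # Mirror list_jobs(): optional state filter passed to the engine,
    # optional case-insensitive title filter applied locally.
    if state:
        results = job.get_all(server, job_state=state.upper())
    else:
        results = job.get_all(server)
    if title:
        results = [j for j in results
                   if re.search(title, j.title, re.IGNORECASE)]
    return results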
+ main(arguments) diff --git a/v1_8_2/dx_operations.py b/v1_8_2/dx_operations.py new file mode 100755 index 0000000..2fb0468 --- /dev/null +++ b/v1_8_2/dx_operations.py @@ -0,0 +1,415 @@ +#!/usr/bin/env python +# Corey Brune - Oct 2016 +# This script starts or stops a VDB +# requirements +# pip install docopt delphixpy + +# The below doc follows the POSIX compliant standards and allows us to use +# this doc to also define our arguments for the script. +"""List all VDBs or Start, stop, enable, disable a VDB +Usage: + dx_operations_vdb.py (--vdb [--stop | --start | --enable | --disable] | --list | --all_dbs ) + [-d | --engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_operations_vdb.py -h | --help | -v | --version +List all VDBs, start, stop, enable, disable a VDB + +Examples: + dx_operations_vdb.py --engine landsharkengine --vdb testvdb --stop + dx_operations_vdb.py --vdb testvdb --start + dx_operations_vdb.py --all_dbs enable + dx_operations_vdb.py --all_dbs disable + dx_operations_vdb.py --list + +Options: + --vdb Name of the VDB to stop or start + --start Stop the VDB + --stop Stop the VDB + --all_dbs Enable or disable all dSources and VDBs + --list List all databases from an engine + --enable Enable the VDB + --disable Disable the VDB + -d Identifier of Delphix engine in dxtools.conf. + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION = 'v.0.3.015' + +import sys +from os.path import basename +from time import sleep, time +import traceback + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import database +from delphixpy.web import job +from delphixpy.web import source +from delphixpy.web.capacity import consumer +from docopt import docopt + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_all_objects +from lib.GetReferences import find_obj_list +from lib.GetSession import GetSession + + +def dx_obj_operation(dlpx_obj, vdb_name, operation): + """ + Function to start, stop, enable or disable a VDB + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param vdb_name: Name of the object to stop/start/enable/disable + :type vdb_name: str + :param operation: enable or disable dSources and VDBs + :type operation: str + """ + + print_debug('Searching for {} reference.\n'.format(vdb_name)) + engine_name = dlpx_obj.dlpx_engines.keys()[0] + vdb_obj = find_obj_by_name(dlpx_obj.server_session, source, vdb_name) + try: + if vdb_obj: + if operation == 'start': + source.start(dlpx_obj.server_session, vdb_obj.reference) + elif operation == 'stop': + source.stop(dlpx_obj.server_session, vdb_obj.reference) + elif operation == 'enable': + source.enable(dlpx_obj.server_session, vdb_obj.reference) + elif operation == 'disable': + source.disable(dlpx_obj.server_session, + vdb_obj.reference) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + except (RequestError, HttpError, JobError, AttributeError), e: + print_exception('An error occurred while performing {} on {}:\n' + '{}'.format(operation, vdb_name, e)) + print '{} was successfully performed on {}.'.format(operation, vdb_name) + + +def all_databases(dlpx_obj, operation): + """ + Enable or disable all dSources and VDBs on an engine + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param operation: enable or disable dSources and VDBs + :type operation: str + """ + + for db in database.get_all(dlpx_obj.server_session): + try: + dx_obj_operation(dlpx_obj, db.name, operation) + except (RequestError, HttpError, JobError): + pass + print '{} {}\n'.format(operation, db.name) + sleep(2) + + +def list_databases(dlpx_obj): + """ + Function to list all databases for a given engine + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + """ + + source_stats_lst = find_all_objects(dlpx_obj.server_session, source) + is_dSource = None + try: + for db_stats in find_all_objects(dlpx_obj.server_session, + consumer): + source_stats = find_obj_list(source_stats_lst, db_stats.name) + if source_stats is not None: + if source_stats.virtual is False: + is_dSource = 'dSource' + elif source_stats.virtual is True: + is_dSource = db_stats.parent + print('name: {},provision container: {},database disk ' + 'usage: {:.2f} GB,Size of Snapshots: {:.2f} GB,' + 'Enabled: {},Status:{},'.format(str(db_stats.name), + str(is_dSource), + db_stats.breakdown.active_space / 1024 / 1024 / 1024, + db_stats.breakdown.sync_space / 1024 / 1024 / 1024, + source_stats.runtime.enabled, + source_stats.runtime.status)) + 
elif source_stats is None: + print('name = {},provision container= {},database disk ' + 'usage: {:.2f} GB,Size of Snapshots: {:.2f} GB,' + 'Could not find source information. This could be a ' + 'result of an unlinked object'.format( + str(db_stats.name), str(db_stats.parent), + db_stats.breakdown.active_space / 1024 / 1024 / 1024, + db_stats.breakdown.sync_space / 1024 / 1024 / 1024)) + except (RequestError, JobError, AttributeError, DlpxException) as e: + print 'An error occurred while listing databases: {}'.format((e)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine, dlpx_obj): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + """ + + try: + # Setup the connection to the Delphix Engine + dlpx_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('ERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dlpx_obj.job_mode(single_thread): + while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: + if len(thingstodo)> 0: + if arguments['--start']: + dx_obj_operation(dlpx_obj, arguments['--vdb'], 'start') + elif arguments['--stop']: + dx_obj_operation(dlpx_obj, arguments['--vdb'], 'stop') + elif arguments['--enable']: + dx_obj_operation(dlpx_obj, arguments['--vdb'], 'enable') + elif arguments['--disable']: + dx_obj_operation(dlpx_obj, arguments['--vdb'], + 'disable') + elif arguments['--list']: + list_databases(dlpx_obj) + elif arguments['--all_dbs']: + all_databases(dlpx_obj, arguments['--all_dbs']) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dlpx_obj.jobs.keys(): + job_obj = job.get(dlpx_obj.server_session, dlpx_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Running JS Bookmark: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the running jobs list. + del dlpx_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. 
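dx_obj_operation() above maps each command-line flag onto the matching delphixpy source call. A minimal sketch of stopping a single VDB by name follows; the engine address, credentials, and the 'testvdb' name (borrowed from the usage examples) are placeholders.

from delphixpy.web import source
from lib.GetReferences import find_obj_by_name
from lib.GetSession import GetSession

# Hypothetical engine session; serversess() is the same helper used above.
dlpx_obj = GetSession()
dlpx_obj.serversess('10.0.1.10', 'delphix_admin', 'changeme')

vdb_obj = find_obj_by_name(dlpx_obj.server_session, source, 'testvdb')
source.stop(dlpx_obj.server_session, vdb_obj.reference)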
+ if len(dlpx_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + except (DlpxException, RequestError, JobError, HttpError) as e: + print_exception('Error in js_bookmark: {}\n{}'.format( + engine['hostname'], e)) + sys.exit(1) + + +def time_elapsed(time_start): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + + :param time_start: start time of the script. + :type time_start: float + """ + return round((time() - time_start)/60, +1) + + +def run_job(dlpx_obj, config_file_path): + """ + This function runs the main_workflow aynchronously against all the + servers specified + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param config_file_path: string containing path to configuration file. + :type config_file_path: str + """ + + # Create an empty list to store threads we create. + threads = [] + engine = None + + # If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info('Executing against all Delphix Engines in the dxtools.conf') + try: + # For each server in the dxtools.conf... + for delphix_engine in dlpx_obj.dlpx_engines: + engine = dlpx_obj.dlpx_engines[delphix_engine] + # Create a new thread and add it to the list. + threads.append(main_workflow(engine, dlpx_obj)) + except DlpxException as e: + print_exception('Error encountered in run_job():\n{}'.format(e)) + sys.exit(1) + + elif arguments['--all'] is False: + # Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dlpx_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + except (DlpxException, RequestError, KeyError): + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value and' + ' try again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + else: + # Else search for a default engine in the dxtools.conf + for delphix_engine in dlpx_obj.dlpx_engines: + if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': + engine = dlpx_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) + break + + if engine is None: + raise DlpxException('\nERROR: No default engine found. Exiting') + + # run the job against the engine + threads.append(main_workflow(engine, dlpx_obj)) + + # For each thread in the list... + for each in threads: + # join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def main(): + # We want to be able to call on these variables anywhere in the script. + global single_thread + global debug + + time_start = time() + single_thread = False + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + config_file_path = arguments['--config'] + # Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + # This is the function that will handle processing main_workflow for + # all the servers. 
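+        # run_job() starts one main_workflow() thread per engine (via the
+        # @run_async decorator) and joins them all before returning.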
+ run_job(dx_session_obj, config_file_path) + + elapsed_minutes = time_elapsed(time_start) + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + # Here we handle what we do when the unexpected happens + except SystemExit as e: + # This is what we use to handle our sys.exit(#) + sys.exit(e) + + except DlpxException as e: + # We use this exception handler when an error occurs in a function call. + print_exception('ERROR: Please check the ERROR message below:\n' + '{}'.format(e.message)) + sys.exit(2) + + except HttpError as e: + # We use this exception handler when our connection to Delphix fails + print_exception('ERROR: Connection failed to the Delphix Engine. Please' + 'check the ERROR message below:\n{}'.format(e.message)) + sys.exit(2) + + except JobError as e: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) + elapsed_minutes = time_elapsed(time_start) + print_exception('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(3) + + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + print_debug('You sent a CTRL+C to interrupt the process') + elapsed_minutes = time_elapsed(time_start) + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + except: + # Everything else gets caught here + print_exception('{}\n{}'.format(sys.exc_info()[0], + traceback.format_exc())) + elapsed_minutes = time_elapsed(time_start) + print_info("{} took {:.2f} minutes to get this far".format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + + +if __name__ == "__main__": + # Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + # Feed our arguments to the main function, and off we go! + main() diff --git a/v1_8_2/dx_operations_vdb.py b/v1_8_2/dx_operations_vdb.py new file mode 100755 index 0000000..4438e85 --- /dev/null +++ b/v1_8_2/dx_operations_vdb.py @@ -0,0 +1,424 @@ +#!/usr/bin/env python +# Corey Brune - Oct 2016 +#This script starts or stops a VDB +#requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""List all VDBs or Start, stop, enable, disable a VDB +Usage: + dx_operations_vdb.py (--vdb [--stop | --start | --enable | --disable] | --list | --all_dbs ) + [-d | --engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_operations_vdb.py -h | --help | -v | --version +List all VDBs, start, stop, enable, disable a VDB + +Examples: + dx_operations_vdb.py --engine landsharkengine --vdb testvdb --stop + dx_operations_vdb.py --vdb testvdb --start + dx_operations_vdb.py --all_dbs enable + dx_operations_vdb.py --all_dbs disable + dx_operations_vdb.py --list + +Options: + --vdb Name of the VDB to stop or start + --start Stop the VDB + --stop Stop the VDB + --all_dbs Enable or disable all dSources and VDBs + --list List all databases from an engine + --enable Enable the VDB + --disable Disable the VDB + -d Identifier of Delphix engine in dxtools.conf. + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. 
+ --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.3.002' + +import sys +from os.path import basename +from time import sleep, time + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import database +from delphixpy.web import job +from delphixpy.web import source +from delphixpy.web.capacity import consumer +from docopt import docopt + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_all_objects +from lib.GetReferences import find_obj_list +from lib.GetReferences import find_source_by_dbname +from lib.GetSession import GetSession + + +def vdb_operation(vdb_name, operation): + """ + Function to start, stop, enable or disable a VDB + """ + print_debug('Searching for {} reference.\n'.format(vdb_name)) + + vdb_obj = find_source_by_dbname(dx_session_obj.server_session, database, vdb_name) + try: + if vdb_obj: + if operation == 'start': + source.start(dx_session_obj.server_session, vdb_obj.reference) + elif operation == 'stop': + source.stop(dx_session_obj.server_session, vdb_obj.reference) + elif operation == 'enable': + source.enable(dx_session_obj.server_session, vdb_obj.reference) + elif operation == 'disable': + source.disable(dx_session_obj.server_session, + vdb_obj.reference) + dx_session_obj.jobs[dx_session_obj.server_session.address] = \ + dx_session_obj.server_session.last_job + + except (RequestError, HttpError, JobError, AttributeError), e: + print('An error occurred while performing {} on {}.:' + '{}\n'.format(operation, vdb_name, e)) + + +def all_databases(operation): + """ + Enable or disable all dSources and VDBs on an engine + + operation: enable or disable dSources and VDBs + """ + + for db in database.get_all(dx_session_obj.server_session, no_js_container_data_source=True): + print '{} {}\n'.format(operation, db.name) + vdb_operation(db.name, operation) + sleep(2) + + +def list_databases(): + """ + Function to list all databases for a given engine + """ + + source_stats_lst = find_all_objects(dx_session_obj.server_session, source) + is_dSource = None + + try: + for db_stats in find_all_objects(dx_session_obj.server_session, + consumer): + + source_stats = find_obj_list(source_stats_lst, db_stats.name) + + if source_stats is not None: + if source_stats.virtual is False: + is_dSource = 'dSource' + + elif source_stats.virtual is True: + is_dSource = db_stats.parent + + print('name = {}\nprovision container= {}\ndatabase disk ' + 'usage: {:.2f} GB\nSize of Snapshots: {:.2f} GB\n' + 'Enabled: {}\nStatus:{}\n'.format(str(db_stats.name), + str(is_dSource), + db_stats.breakdown.active_space / 1024 / 1024 / 1024, + db_stats.breakdown.sync_space / 1024 / 1024 / 1024, + source_stats.runtime.enabled, + source_stats.runtime.status)) + + elif source_stats is None: + print('name = {}\nprovision container= {}\ndatabase disk ' + 'usage: {:.2f} GB\nSize of Snapshots: {:.2f} GB\n' + 'Could not find source information. 
This could be a ' + 'result of an unlinked object.\n'.format( + str(db_stats.name), str(db_stats.parent), + db_stats.breakdown.active_space / 1024 / 1024 / 1024, + db_stats.breakdown.sync_space / 1024 / 1024 / 1024)) + + + except (RequestError, JobError, AttributeError, DlpxException) as e: + print 'An error occurred while listing databases: {}'.format((e)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + jobs = {} + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + with dx_session_obj.job_mode(single_thread): + while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0: + if len(thingstodo)> 0: + + if arguments['--start']: + vdb_operation(arguments['--vdb'], 'start') + + elif arguments['--stop']: + vdb_operation(arguments['--vdb'], 'stop') + + elif arguments['--enable']: + vdb_operation(arguments['--vdb'], 'enable') + + elif arguments['--disable']: + vdb_operation(arguments['--vdb'], 'disable') + + elif arguments['--list']: + list_databases() + + elif arguments['--all_dbs']: + try: + assert arguments['--all_dbs'] in 'disable' or \ + arguments['--all_dbs'] in 'enable', \ + '--all_dbs should be either enable or disable' + all_databases(arguments['--all_dbs']) + + except AssertionError as e: + print 'ERROR:\n{}\n'.format(e) + sys.exit(1) + + thingstodo.pop() + + #get all the jobs, then inspect them + i = 0 + for j in dx_session_obj.jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, + dx_session_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Operations: {}'.format(engine['hostname'], + job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the + # running jobs list. + del dx_session_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + #If the job is in a running state, increment the running + # job count. + i += 1 + + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + + #If we have running jobs, pause before repeating the checks. + if len(dx_session_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. 
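+    #Each engine gets its own main_workflow() thread; the Thread objects are
+    # collected here so they can all be joined at the end of run_job().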
+    threads = []
+    engine = None
+
+    #If the --all argument was given, run against every engine in dxtools.conf
+    if arguments['--all']:
+        print_info("Executing against all Delphix Engines in the dxtools.conf")
+
+        try:
+            #For each server in the dxtools.conf...
+            for delphix_engine in dx_session_obj.dlpx_engines:
+                engine = dx_session_obj.dlpx_engines[delphix_engine]
+                #Create a new thread and add it to the list.
+                threads.append(main_workflow(engine))
+
+        except DlpxException as e:
+            print 'Error encountered in run_job():\n{}'.format(e)
+            sys.exit(1)
+
+    elif arguments['--all'] is False:
+        #Else if the --engine argument was given, test to see if the engine
+        # exists in dxtools.conf
+        if arguments['--engine']:
+            try:
+                engine = dx_session_obj.dlpx_engines[arguments['--engine']]
+                print_info('Executing against Delphix Engine: {}\n'.format(
+                    arguments['--engine']))
+
+            except (DlpxException, RequestError, KeyError):
+                raise DlpxException('\nERROR: Delphix Engine {} cannot be '
+                                    'found in {}. Please check your value '
+                                    'and try again. Exiting.\n'.format(
+                                        arguments['--engine'],
+                                        config_file_path))
+
+        else:
+            #Else search for a default engine in the dxtools.conf
+            for delphix_engine in dx_session_obj.dlpx_engines:
+                if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \
+                        'true':
+                    engine = dx_session_obj.dlpx_engines[delphix_engine]
+                    print_info('Executing against the default Delphix Engine '
+                               'in the dxtools.conf: {}'.format(
+                                   dx_session_obj.dlpx_engines[
+                                       delphix_engine]['hostname']))
+                    break
+
+        if engine is None:
+            raise DlpxException('\nERROR: No default engine found. Exiting')
+
+        #run the job against the engine
+        threads.append(main_workflow(engine))
+
+    #For each thread in the list...
+    for each in threads:
+        #join them back together so that we wait for all threads to complete
+        # before moving on
+        each.join()
+
+
+def time_elapsed():
+    """
+    This function calculates the time elapsed since the beginning of the script.
+    Call this anywhere you want to note the progress in terms of time
+    """
+    return round((time() - time_start)/60, +1)
+
+
+def main(arguments):
+    #We want to be able to call on these variables anywhere in the script.
+    global single_thread
+    global usebackup
+    global time_start
+    global config_file_path
+    global dx_session_obj
+    global debug
+
+    if arguments['--debug']:
+        debug = True
+
+    try:
+        dx_session_obj = GetSession()
+        logging_est(arguments['--logdir'])
+        print_debug(arguments)
+        time_start = time()
+        engine = None
+        single_thread = False
+        config_file_path = arguments['--config']
+        #Parse the dxtools.conf and put it into a dictionary
+        dx_session_obj.get_config(config_file_path)
+
+        #This is the function that will handle processing main_workflow for
+        # all the servers.
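+        #run_job() handles the --engine, default-engine and --all cases; see
+        # the Examples section in the docstring above, e.g.:
+        #   dx_operations_vdb.py --vdb testvdb --start
+        #   dx_operations_vdb.py --list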
+ run_job() + + #elapsed_minutes = time_elapsed() + print_info('script took {:.2f} minutes to get this far.'.format( + time_elapsed())) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message:\n{}\n').format(e) + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far:\n{}\n'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_operations_vdb_orig.py b/v1_8_2/dx_operations_vdb_orig.py new file mode 100755 index 0000000..4d49774 --- /dev/null +++ b/v1_8_2/dx_operations_vdb_orig.py @@ -0,0 +1,431 @@ +#!/usr/bin/env python +# Corey Brune - Oct 2016 +#This script starts or stops a VDB +#requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""List all VDBs or Start, stop, enable, disable a VDB +Usage: + dx_operations_vdb.py (--vdb [--stop | --start | --enable | --disable] | --list | --all_dbs ) + [-d | --engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_operations_vdb.py -h | --help | -v | --version +List all VDBs, start, stop, enable, disable a VDB + +Examples: + dx_operations_vdb.py --engine landsharkengine --vdb testvdb --stop + dx_operations_vdb.py --vdb testvdb --start + dx_operations_vdb.py --all_dbs enable + dx_operations_vdb.py --all_dbs disable + dx_operations_vdb.py --list + +Options: + --vdb Name of the VDB to stop or start + --start Stop the VDB + --stop Stop the VDB + --all_dbs Enable or disable all dSources and VDBs + --list List all databases from an engine + --enable Enable the VDB + --disable Disable the VDB + -d Identifier of Delphix engine in dxtools.conf. + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION = 'v.0.2.301' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt +import re + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import database +from delphixpy.web import job +from delphixpy.web import source +from delphixpy.web import sourceconfig +from delphixpy.web import repository +from delphixpy.web import environment +from delphixpy.web.capacity import consumer + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetSession import GetSession + + +def vdb_operation(vdb_name, operation): + """ + Function to start, stop, enable or disable a VDB + """ + print_debug('Searching for %s reference.\n' % (vdb_name)) + + vdb_obj = find_obj_by_name(dx_session_obj.server_session, source, vdb_name) + + try: + if vdb_obj: + if operation == 'start': + source.start(dx_session_obj.server_session, vdb_obj.reference) + elif operation == 'stop': + source.stop(dx_session_obj.server_session, vdb_obj.reference) + elif operation == 'enable': + source.enable(dx_session_obj.server_session, vdb_obj.reference) + elif operation == 'disable': + source.disable(dx_session_obj.server_session, + vdb_obj.reference) + + except (RequestError, HttpError, JobError, AttributeError), e: + raise DlpxException('An error occurred while performing ' + + operation + ' on ' + vdb_name + '.:%s\n' % (e)) + + +def all_databases(operation): + """ + Enable or disable all dSources and VDBs on an engine + + operation: enable or disable dSources and VDBs + """ + + for db in database.get_all(dx_session_obj.server_session): + # assert isinstance(db.name, object) + print '%s %s\n' % (operation, db.name) + vdb_operation(db.name, operation) + sleep(2) + + +def list_databases(): + """ + Function to list all databases for a given engine + """ + + import pdb;pdb.set_trace() + try: + for db_stats in consumer.get_all(dx_session_obj.server_session): + db_stats_env = repository.get(dx_session_obj.server_session, + find_obj_by_name( + dx_session_obj.server_session,sourceconfig, + db_stats.name).repository) + + env_obj = environment.get(dx_session_obj.server_session, + db_stats_env.environment) + + source_stats = find_obj_by_name(dx_session_obj.server_session, + source, db_stats.name) + + if db_stats.parent == None: + db_stats.parent = 'dSource' + + print('Name = %s\nProvision Container Reference= %s\n' + 'Virtualized Database Disk Usage: %.2f GB\n' + 'Unvirtualized Database Disk Usage: %.2f GB\n' + 'Size of Snapshots: %.2f GB\nEnabled: %s\n' + 'Status:%s\nEnvironment: %s\n' % (str(db_stats.name), + str(db_stats.parent), + db_stats.breakdown.active_space / 1024 / 1024 / 1024, + source_stats.runtime.database_size / 1024 / 1024 / 1024, + db_stats.breakdown.sync_space / 1024 / 1024 / 1024, + source_stats.runtime.enabled, source_stats.runtime.status, + env_obj.name)) + + except (JobError) as e: + #except (RequestError, DlpxException, JobError, AttributeError) as e: + print_exception('An error occurred while listing databases:' + ' \n{}\n'.format((e))) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). 
+ Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + jobs = {} + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine %s encountered an error while' + '%s:\n%s\n' % (engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + #reset the running job count before we begin + i = 0 + with dx_session_obj.job_mode(single_thread): + while (len(jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo)> 0: + try: + if arguments['--start']: + vdb_operation(database_name, 'start') + + elif arguments['--stop']: + vdb_operation(database_name, 'stop') + + elif arguments['--enable']: + vdb_operation(database_name, 'enable') + + elif arguments['--disable']: + vdb_operation(database_name, 'disable') + + elif arguments['--list']: + list_databases() + + elif arguments['--all_dbs']: + if not re.match('disable|enable', + arguments['--all_dbs'].lower()): + raise DlpxException('--all_dbs should be either' + 'enable or disable') + + except DlpxException as e: + print('\nERROR: Could not perform action on the VDB(s)' + '\n%s\n' % e.message) + thingstodo.pop() + + #get all the jobs, then inspect them + i = 0 + for j in jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, jobs[j]) + print_debug(job_obj) + print_info(engine["hostname"] + ": VDB Operations: " + + job_obj.job_state) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the + # running jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running + # job count. + i += 1 + + print_info(engine["hostname"] + ": " + str(i) + " jobs running. ") + #If we have running jobs, pause before repeating the checks. + if len(jobs) > 0: + sleep(float(arguments['--poll'])) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + + #If the --all argument was given, run against every engine in dxtools.conf + + engine = None + + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. 
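+                #main_workflow() does not block; it returns the Thread
+                # started by the @run_async decorator.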
+ threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n%s' % (e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: %s\n' % + (arguments['--engine'])) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine %s cannot be ' + 'found in %s. Please check your value ' + 'and try again. Exiting.\n' % ( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: %s' % ( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global database_name + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + database_name = arguments['--vdb'] + + #This is the function that will handle processing main_workflow for + # all the servers. 
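+        #run_job() and the main_workflow() threads it spawns rely on the
+        # module-level globals set above (dx_session_obj, database_name,
+        # single_thread, config_file_path).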
+ run_job() + + elapsed_minutes = time_elapsed() + print_info("script took " + str(elapsed_minutes) + + " minutes to get this far.") + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message below') + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + sys.exit(3) + + except DlpxException as e: + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + sys.exit(1) + + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_provision_dsource.py b/v1_8_2/dx_provision_dsource.py new file mode 100755 index 0000000..94125d3 --- /dev/null +++ b/v1_8_2/dx_provision_dsource.py @@ -0,0 +1,623 @@ +#!/usr/bin/env python +# Corey Brune - Feb 2017 +#Description: +# Create and sync a dSource +# +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. 
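+#Supported dSource types: oracle, sybase (ASE) and mssql; each type is
+# handled by its own create_*/link_* function further down in this script.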
+"""Create and sync a dSource +Usage: + dx_provision_dsource.py (--type ) + dx_provision_dsource.py --type --dsource_name --ip_addr --db_name --env_name --db_install_path --dx_group --db_passwd --db_user [--port_num ][--num_connections ][--link_now ][--files_per_set ][--rman_channels ] + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_provision_dsource.py --type --dsource_name --ase_user --ase_passwd --backup_path --source_user --stage_user aseadmin --stage_repo ASE1570_S2 --src_config --env_name --dx_group [--bck_file ][--create_bckup] + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_provision_dsource.py --type --dsource_name --dx_group --db_passwd --db_user --stage_instance --stage_env --backup_path [--backup_loc_passwd --backup_loc_user --logsync --load_from_backup] + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_provision_dsource.py -h | --help | -v | --version + +Create and sync a dSource +Examples: + Oracle: + dx_provision_dsource.py --type oracle --dsource_name oradb1 --ip_addr 192.168.166.11 --db_name srcDB1 --env_name SourceEnv --db_install_path /u01/app/oracle/product/11.2.0.4/dbhome_1 --db_user delphixdb --db_passwd delphixdb + + Sybase: + dx_provision_dsource.py --type sybase --dsource_name dbw1 --ase_user sa --ase_passwd sybase --backup_path /data/db --source_user aseadmin --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw1 --env_name aseSource --dx_group Sources + + Specify backup files: + dx_provision_dsource.py --type sybase --dsource_name dbw2 --ase_user sa --ase_passwd sybase --backup_path /data/db --source_user aseadmin --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw2 --env_name aseSource --dx_group Sources --bck_file "dbw2data.dat" + + Create a new backup and ingest: + dx_provision_dsource.py --type sybase --dsource_name dbw2 --ase_user sa --ase_passwd sybase --backup_path /data/db --source_user aseadmin --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw2 --env_name aseSource --dx_group Sources --create_bckup + + MSSQL: + dx_provision_dsource.py --type mssql --dsource_name mssql_dsource --dx_group Sources --db_passwd delphix --db_user sa --stage_env mssql_target_svr --stage_instance MSSQLSERVER --backup_path \\bckserver\path\backups --backup_loc_passwd delphix --backup_loc_user delphix + dx_provision_dsource.py --type mssql --dsource_name AdventureWorks2014 --dx_group "9 - Sources" --db_passwd delphixdb --db_user aw --stage_env WINDOWSTARGET --stage_instance MSSQLSERVER --logsync --backup_path auto --load_from_backup + + +Options: + --type dSource type. mssql, sybase or oracle + --ip_addr IP Address of the dSource + --db_name Name of the dSource DB + --env_name Name of the environment where the dSource installed + --db_install_path Location of the installation path of the DB. + --num_connections Number of connections for Oracle RMAN + [default: 5] + --link_now Link the dSource + [default: True] + --files_per_set Configures how many files per set for Oracle RMAN + [default: 5] + --rman_channels Configures the number of Oracle RMAN Channels + [default: 2] + --dx_group Group where the dSource will reside + --create_bckup Create and ingest a new Sybase backup + --db_user Username of the dSource DB + --db_passwd Password of the db_user + --bck_file Fully qualified name of backup file + --port_num Port number of the listener. 
Default: 1521 + --src_config Name of the configuration environment + --ase_passwd ASE DB password + --ase_user ASE username + --backup_path Path to the ASE/MSSQL backups + --source_user Environment username + --stage_user Stage username + --stage_repo Stage repository + --stage_instance Name of the PPT instance + --stage_env Name of the PPT server + --logsync Enable logsync + --backup_loc_passwd Password of the shared backup path (--bckup_path) + --backup_loc_user User of the shared backup path (--bckup_path) + --load_from_backup If set, Delphix will try to load the most recent full backup (MSSQL only) + --dsource_name Name of the dSource + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_provision_dsource.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.2.0016' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt, DocoptExit + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import sourceconfig +from delphixpy.web import group +from delphixpy.web import job +from delphixpy.web import environment +from delphixpy.web import repository +from delphixpy.web import database +from delphixpy.web.vo import OracleSIConfig +from delphixpy.web.vo import OracleInstance +from delphixpy.web.vo import LinkParameters +from delphixpy.web.vo import OracleLinkData +from delphixpy.web.vo import OracleSourcingPolicy +from delphixpy.web.vo import ASELinkData +from delphixpy.web.vo import ASELatestBackupSyncParameters +from delphixpy.web.vo import ASENewBackupSyncParameters +from delphixpy.web.vo import ASESpecificBackupSyncParameters +from delphixpy.web.vo import MSSqlLinkData +from delphixpy.web.vo import SourcingPolicy + +from lib.DlpxException import DlpxException +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_dbrepo +from lib.GetReferences import get_running_job +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetSession import GetSession + + +def create_ora_sourceconfig(engine_name, port_num=1521): + """ + :param ip_addr: + :param db_name: + :return: + """ + create_ret = None + env_obj = find_obj_by_name(dx_session_obj.server_session, environment, + arguments['--env_name']) + + try: + sourceconfig_ref = find_obj_by_name(dx_session_obj.server_session, + sourceconfig, + arguments['--db_name']).reference + except DlpxException: + sourceconfig_ref = None + + repo_ref = find_dbrepo(dx_session_obj.server_session, + 'OracleInstall', env_obj.reference, + arguments['--db_install_path']).reference + + dsource_params = OracleSIConfig() + + connect_str = ('jdbc:oracle:thin:@' + arguments['--ip_addr'] + ':' + + str(port_num) + ':' + arguments['--db_name']) + + dsource_params.database_name = arguments['--db_name'] + dsource_params.unique_name = arguments['--db_name'] + dsource_params.repository = repo_ref + dsource_params.instance = OracleInstance() + dsource_params.instance.instance_name = arguments['--db_name'] + 
dsource_params.instance.instance_number = 1 + dsource_params.services = [{'type': 'OracleService', + 'jdbcConnectionString': connect_str}] + + try: + if sourceconfig_ref is None: + create_ret = link_ora_dsource(sourceconfig.create( + dx_session_obj.server_session, dsource_params), + env_obj.primary_user) + elif sourceconfig_ref is not None: + create_ret = link_ora_dsource(sourceconfig_ref, + env_obj.primary_user) + + print_info('Created and linked the dSource {} with reference {}.\n'.format( + arguments['--db_name'], create_ret)) + link_job_ref = dx_session_obj.server_session.last_job + link_job_obj = job.get(dx_session_obj.server_session, + link_job_ref) + while link_job_obj.job_state not in ["CANCELED", "COMPLETED", "FAILED"]: + print_info('Waiting three seconds for link job to complete, and sync to begin') + sleep(3) + link_job_obj = job.get(dx_session_obj.server_session, + link_job_ref) + + #Add the snapsync job to the jobs dictionary + dx_session_obj.jobs[engine_name + 'snap'] = get_running_job( + dx_session_obj.server_session, find_obj_by_name( + dx_session_obj.server_session, database, + arguments['--dsource_name']).reference) + print_debug('Snapshot Job Reference: {}.\n'.format( + dx_session_obj.jobs[engine_name + 'snap'])) + except (HttpError, RequestError) as e: + print_exception('ERROR: Could not create the sourceconfig:\n' + '{}'.format(e)) + sys.exit(1) + + +def link_ora_dsource(srcconfig_ref, primary_user_ref): + """ + :param srcconfig_ref: Reference to the sourceconfig object + :param primary_user_ref: Reference to the environment user + :return: Reference of the linked dSource + """ + + link_params = LinkParameters() + link_params.link_data = OracleLinkData() + link_params.link_data.sourcing_policy = OracleSourcingPolicy() + link_params.name = arguments['--dsource_name'] + link_params.group = find_obj_by_name(dx_session_obj.server_session, group, + arguments['--dx_group']).reference + link_params.link_data.compressedLinkingEnabled = True + link_params.link_data.environment_user = primary_user_ref + link_params.link_data.db_user = arguments['--db_user'] + link_params.link_data.number_of_connections = \ + int(arguments['--num_connections']) + link_params.link_data.link_now = bool(arguments['--link_now']) + link_params.link_data.files_per_set = int(arguments['--files_per_set']) + link_params.link_data.rman_channels = int(arguments['--rman_channels']) + link_params.link_data.db_credentials = {'type': 'PasswordCredential', + 'password': + arguments['--db_passwd']} + link_params.link_data.sourcing_policy.logsync_enabled = True + #link_params.link_data.sourcing_policy.logsync_mode = 'ARCHIVE_REDO_MODE' + link_params.link_data.config = srcconfig_ref + try: + return database.link(dx_session_obj.server_session, link_params) + except (RequestError, HttpError) as e: + print_exception('Database link failed for {}:\n{}\n'.format( + arguments['--dsource_name'], e)) + sys.exit(1) + + +def link_mssql_dsource(engine_name): + """ + Link an MSSQL dSource + """ + link_params = LinkParameters() + link_params.name = arguments['--dsource_name'] + link_params.link_data = MSSqlLinkData() + + try: + env_obj_ref = find_obj_by_name(dx_session_obj.server_session, + environment, + arguments['--stage_env']).reference + + link_params.link_data.ppt_repository = find_dbrepo( + dx_session_obj.server_session, 'MSSqlInstance', env_obj_ref, + arguments['--stage_instance']).reference + link_params.link_data.config = find_obj_by_name( + dx_session_obj.server_session, sourceconfig, + 
arguments['--dsource_name']).reference + link_params.group = find_obj_by_name(dx_session_obj.server_session, + group, + arguments['--dx_group']).reference + + except DlpxException as e: + print_exception('Could not link {}: {}\n'.format( + arguments['--dsource_name'], e)) + sys.exit(1) + + if arguments['--backup_path'] != "auto": + link_params.link_data.shared_backup_location = arguments['--backup_path'] + + if arguments['--backup_loc_passwd']: + link_params.link_data.backup_location_credentials = {'type': + 'PasswordCredential', + 'password': + arguments['--backup_loc_passwd']} + link_params.link_data.backup_location_user = \ + arguments['--backup_loc_user'] + + link_params.link_data.db_credentials = {'type': 'PasswordCredential', + 'password': + arguments['--db_passwd']} + link_params.link_data.db_user = arguments['--db_user'] + + link_params.link_data.sourcing_policy = SourcingPolicy() + + if arguments['--load_from_backup']: + link_params.link_data.sourcing_policy.load_from_backup = True + + if arguments['--logsync']: + link_params.link_data.sourcing_policy.logsync_enabled = True + + try: + database.link(dx_session_obj.server_session, link_params) + dx_session_obj.jobs[engine_name] = dx_session_obj.server_session.last_job + dx_session_obj.jobs[engine_name + 'snap'] = get_running_job( + dx_session_obj.server_session, find_obj_by_name( + dx_session_obj.server_session, database, + arguments['--dsource_name']).reference) + + except (HttpError, RequestError, JobError) as e: + print_exception('Database link failed for {}:\n{}\n'.format( + arguments['--dsource_name'], e)) + + +def link_ase_dsource(engine_name): + """ + Link an ASE dSource + """ + + link_params = LinkParameters() + link_params.name = arguments['--dsource_name'] + link_params.link_data = ASELinkData() + link_params.link_data.db_credentials = {'type': 'PasswordCredential', + 'password': + arguments['--ase_passwd']} + link_params.link_data.db_user = arguments['--ase_user'] + link_params.link_data.load_backup_path = arguments['--backup_path'] + + if arguments['--bck_file']: + link_params.link_data.sync_parameters = \ + ASESpecificBackupSyncParameters() + bck_files = (arguments['--bck_file']).split(' ') + link_params.link_data.sync_parameters.backup_files = bck_files + + elif arguments['--create_bckup']: + link_params.link_data.sync_parameters = ASENewBackupSyncParameters() + + else: + link_params.link_data.sync_parameters = ASELatestBackupSyncParameters() + + try: + link_params.group = find_obj_by_name( + dx_session_obj.server_session, group, + arguments['--dx_group']).reference + env_user_ref = link_params.link_data.stage_user = find_obj_by_name( + dx_session_obj.server_session, environment, + arguments['--env_name']).primary_user + link_params.link_data.staging_host_user = env_user_ref + link_params.link_data.source_host_user = env_user_ref + + link_params.link_data.config = find_obj_by_name( + dx_session_obj.server_session, sourceconfig, + arguments['--src_config']).reference + link_params.link_data.staging_repository = find_obj_by_name( + dx_session_obj.server_session, repository, + arguments['--stage_repo']).reference + + except DlpxException as e: + print_exception('Could not link {}: {}\n'.format( + arguments['--dsource_name'], e)) + sys.exit(1) + + try: + dsource_ref = database.link(dx_session_obj.server_session, link_params) + dx_session_obj.jobs[engine_name] = dx_session_obj.server_session.last_job + dx_session_obj.jobs[engine_name + 'snap'] = get_running_job( + dx_session_obj.server_session, find_obj_by_name( + 
dx_session_obj.server_session, database, + arguments['--dsource_name']).reference) + print '{} sucessfully linked {}'.format(dsource_ref, + arguments['--dsource_name']) + except (RequestError, HttpError) as e: + print_exception('Database link failed for {}:\n{}'.format( + arguments['--dsource_name'], e)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + jobs = {} + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format( + dx_session_obj.dlpx_engines['hostname'], + arguments['--target'], e)) + sys.exit(1) + thingstodo = ["thingtodo"] + try: + with dx_session_obj.job_mode(single_thread): + while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo) > 0: + if arguments['--type'].lower() == 'oracle': + create_ora_sourceconfig(engine["hostname"]) + elif arguments['--type'].lower() == 'sybase': + link_ase_dsource(engine["hostname"]) + elif arguments['--type'].lower() == 'mssql': + link_mssql_dsource(engine["hostname"]) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dx_session_obj.jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, + dx_session_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Provisioning dSource: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the + # running jobs list. + del dx_session_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dx_session_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + except (HttpError, RequestError, JobError, DlpxException) as e: + print_exception('ERROR: Could not complete ingesting the source ' + 'data:\n{}'.format(e)) + sys.exit(1) + + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... 
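+            #dlpx_engines maps engine names to the connection details parsed
+            # from dxtools.conf by GetSession.get_config().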
+ for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n%s' % (e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + (arguments['--engine']))) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: %s' % ( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global database_name + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. 
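+        #run_job() drives main_workflow(), which links the dSource and then
+        # polls both the link job and the snapshot job (stored under
+        # '<engine>' and '<engine>snap' in dx_session_obj.jobs).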
+ run_job() + + elapsed_minutes = time_elapsed() + print_info('script took {} minutes to get this far.'.format( + str(elapsed_minutes))) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message below:\n{}'.format(e)) + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far:\n{}\n'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + + try: + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) + + except DocoptExit as e: + #print 'Exited because options were not specified: {}\n'.format(e) + print (e.message) + diff --git a/v1_8_2/dx_provision_vdb.py b/v1_8_2/dx_provision_vdb.py new file mode 100755 index 0000000..8a4580c --- /dev/null +++ b/v1_8_2/dx_provision_vdb.py @@ -0,0 +1,1140 @@ +#!/usr/bin/env python +#Adam Bowen - Apr 2016 +#This script provisions a vdb or dSource +# Updated by Corey Brune Aug 2016 +# --- Create vFiles VDB +#requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. + +#TODO: +# Refactor provisioning functions +# Documentation + +"""Provision VDB's +Usage: + dx_provision_db.py --source --target_grp --target + (--db | --vfiles_path ) [--no_truncate_log] + (--environment --type ) [ --envinst ] + [--template ] [--mapfile ] + [--timestamp_type ] [--timestamp ] + [--timeflow ] + [--instname ] [--mntpoint ] [--noopen] + [--uniqname ][--source_grp ] + [--engine | --all] + [--vdb_restart ] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + [--postrefresh ] [--prerefresh ] + [--configure-clone ] + [--prerollback ] [--postrollback ] + dx_provision_db.py -h | --help | -v | --version +Provision VDB from a defined source on the defined target environment. 
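+The --type value selects the provisioning path (oracle, mssql, ase or
+vfiles); each type is handled by a matching provisioning routine in this
+script.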
+ +Examples: + dx_provision_vdb.py --engine landsharkengine --source_grp Sources --source "ASE pubs3 DB" --db vase --target testASE --target_grp Analytics --environment LINUXTARGET --type ase --envinst "LINUXTARGET" + + dx_provision_vdb.py --source_grp Sources --source "Employee Oracle 11G DB" --instname autod --uniqname autoprod --db autoprod --target autoprod --target_grp Analytics --environment LINUXTARGET --type oracle --envinst "/u01/app/oracle/product/11.2.0/dbhome_1" + + dx_provision_vdb.py --source_grp Sources --source "AdventureWorksLT2008R2" --db vAW --target testAW --target_grp Analytics --environment WINDOWSTARGET --type mssql --envinst MSSQLSERVER --all + + dx_provision_vdb.py --source UF_Source --target appDataVDB --target_grp Untitled --environment LinuxTarget --type vfiles --vfiles_path /mnt/provision/appDataVDB --prerollback "/u01/app/oracle/product/scripts/PreRollback.sh" --postrollback "/u01/app/oracle/product/scripts/PostRollback.sh" --vdb_restart true + +Options: + --source_grp The group where the source resides. + --source Name of the source object + --target_grp The group into which Delphix will place the VDB. + --target The unique name that you want to call this object + in Delphix + --db The name you want to give the database (Oracle Only) + --vfiles_path The full path on the Target server where Delphix + will provision the vFiles + --no_truncate_log Don't truncate log on checkpoint (ASE only) + --environment The name of the Target environment in Delphix + --type The type of VDB this is. + oracle | mssql | ase | vfiles + --prerefresh Pre-Hook commands + --postrefresh Post-Hook commands + --prerollback Post-Hook commands + --postrollback Post-Hook commands + --configure-clone Configure Clone commands + --vdb_restart Either True or False. Default: False + --envinst The identifier of the instance in Delphix. + ex. "/u01/app/oracle/product/11.2.0/dbhome_1" + ex. LINUXTARGET + --timeflow Name of the timeflow from which you are provisioning + --timestamp_type The type of timestamp you are specifying. + Acceptable Values: TIME, SNAPSHOT + [default: SNAPSHOT] + --timestamp + The Delphix semantic for the point in time from + which you want to provision your VDB. + Formats: + latest point in time or snapshot: LATEST + point in time: "YYYY-MM-DD HH24:MI:SS" + snapshot name: "@YYYY-MM-DDTHH24:MI:SS.ZZZ" + snapshot time from GUI: "YYYY-MM-DD HH24:MI" + [default: LATEST] + --template Target VDB Template name (Oracle Only) + --mapfile Target VDB mapping file (Oracle Only) + --instname Target VDB SID name (Oracle Only) + --uniqname Target VDB db_unique_name (Oracle Only) + --mntpoint Mount point for the VDB + [default: /mnt/provision] + --noopen Don't open database after provision (Oracle Only) + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_provision_vdb.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION = 'v.0.2.305' + +import signal +import sys +import time +import traceback +import re +from docopt import docopt +from os.path import basename +from time import sleep, time + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import database +from delphixpy.web import environment +from delphixpy.web import group +from delphixpy.web import job +from delphixpy.web import repository +from delphixpy.web import snapshot +from delphixpy.web import source +from delphixpy.web.database import template +from delphixpy.web.vo import VirtualSourceOperations +from delphixpy.web.vo import OracleDatabaseContainer +from delphixpy.web.vo import OracleInstance +from delphixpy.web.vo import OracleProvisionParameters +from delphixpy.web.vo import OracleSIConfig +from delphixpy.web.vo import OracleVirtualSource +from delphixpy.web.vo import TimeflowPointLocation +from delphixpy.web.vo import TimeflowPointSemantic +from delphixpy.web.vo import TimeflowPointTimestamp +from delphixpy.web.vo import ASEDBContainer +from delphixpy.web.vo import ASEInstanceConfig +from delphixpy.web.vo import ASEProvisionParameters +from delphixpy.web.vo import ASESIConfig +from delphixpy.web.vo import ASEVirtualSource +from delphixpy.web.vo import MSSqlProvisionParameters +from delphixpy.web.vo import MSSqlDatabaseContainer +from delphixpy.web.vo import MSSqlVirtualSource +from delphixpy.web.vo import MSSqlSIConfig +from delphixpy.web.vo import AppDataVirtualSource +from delphixpy.web.vo import AppDataProvisionParameters +from delphixpy.web.vo import AppDataDirectSourceConfig + +from lib.DxTimeflow import DxTimeflow +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_dbrepo +from lib.GetReferences import find_obj_by_name +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug + + +def create_ase_vdb(engine, server, jobs, vdb_group, vdb_name, environment_obj, + container_obj): + ''' + Create a Sybase ASE VDB + ''' + vdb_obj = find_database_by_name_and_group_name(engine, server, + vdb_group.name, vdb_name) + if vdb_obj == None: + vdb_params = ASEProvisionParameters() + vdb_params.container = ASEDBContainer() + if arguments['--no_truncate_log']: + vdb_params.truncate_log_on_checkpoint = False + else: + vdb_params.truncate_log_on_checkpoint = True + vdb_params.container.group = vdb_group.reference + vdb_params.container.name = vdb_name + vdb_params.source = ASEVirtualSource() + vdb_params.source_config = ASESIConfig() + vdb_params.source_config.database_name = arguments['--db'] + vdb_params.source_config.instance = ASEInstanceConfig() + vdb_params.source_config.instance.host = environment_obj.host + + vdb_repo = find_dbrepo_by_environment_ref_and_name(engine, server, + "ASEInstance", + environment_obj.reference, + arguments['--envinst']) + + vdb_params.source_config.repository = vdb_repo.reference + vdb_params.timeflow_point_parameters = set_timeflow_point(engine, + server, + container_obj) + + vdb_params.timeflow_point_parameters.container = container_obj.reference + print_info("Provisioning " + vdb_name) + database.provision(server, vdb_params) + + #Add the job into the jobs dictionary so we can track its progress + jobs[engine["hostname"]] = server.last_job + #return the job object to the calling statement so that we can tell if + # a job 
was created or not (will return None, if no job) + return server.last_job + else: + print_info(engine["hostname"] + ": " + vdb_name + " already exists.") + return vdb_obj.reference + + +def create_mssql_vdb(engine, jobs, vdb_group, vdb_name, + environment_obj, container_obj): + ''' + Create a MSSQL VDB + engine: + jobs: + vdb_group: + vdb_name, + environment_obj: + container_obj: + + ''' + vdb_obj = find_database_by_name_and_group_name(engine, dx_session_obj.server_session, + vdb_group.name, vdb_name) + if vdb_obj == None: + vdb_params = MSSqlProvisionParameters() + vdb_params.container = MSSqlDatabaseContainer() + vdb_params.container.group = vdb_group.reference + vdb_params.container.name = vdb_name + vdb_params.source = MSSqlVirtualSource() + vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False + vdb_params.source_config = MSSqlSIConfig() + vdb_params.source_config.database_name = arguments['--db'] + + vdb_params.source_config.repository = find_dbrepo( + dx_session_obj.server_session, 'MSSqlInstance', environment_obj.reference, + arguments['--envinst']).reference + + vdb_params.timeflow_point_parameters = set_timeflow_point(engine, + dx_session_obj.server_session, + container_obj) + if not vdb_params.timeflow_point_parameters: + return + vdb_params.timeflow_point_parameters.container = \ + container_obj.reference + print_info(engine["hostname"] + ":Provisioning " + vdb_name) + database.provision(dx_session_obj.server_session, vdb_params) + #Add the job into the jobs dictionary so we can track its progress + jobs[engine["hostname"]] = dx_session_obj.server_session.last_job + #return the job object to the calling statement so that we can tell if + # a job was created or not (will return None, if no job) + return dx_session_obj.server_session.last_job + else: + print_info(engine["hostname"] + ": " + vdb_name + " already exists.") + return vdb_obj.reference + + +def create_vfiles_vdb(engine, jobs, vfiles_group, vfiles_name, + environment_obj, container_obj, pre_refresh=None, + post_refresh=None, pre_rollback=None, + post_rollback=None, configure_clone=None): + ''' + Create a Vfiles VDB + ''' + + vfiles_obj = None + + try: + vfiles_obj = find_obj_by_name(dx_session_obj.server_session, + database, vfiles_name) + except DlpxException: + pass + + if vfiles_obj is None: + vfiles_repo = find_repo_by_environment_ref(engine, + 'Unstructured Files', + environment_obj.reference) + + vfiles_params = AppDataProvisionParameters() + vfiles_params.source = AppDataVirtualSource() + vfiles_params.source_config = AppDataDirectSourceConfig() + + vdb_restart_reobj = re.compile('true', re.IGNORECASE) + + if vdb_restart_reobj.search(str(arguments['--vdb_restart'])): + vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = True + + elif vdb_restart_reobj.search(str(arguments['--vdb_restart'])) is None: + vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = False + + vfiles_params.container = { 'type': 'AppDataContainer', + 'group': vfiles_group.reference, + 'name': vfiles_name } + + vfiles_params.source_config.name = arguments['--target'] + vfiles_params.source_config.path = arguments['--vfiles_path'] + vfiles_params.source_config.environment_user = \ + environment_obj.primary_user + vfiles_params.source_config.repository = vfiles_repo.reference + + + vfiles_params.source.parameters = {} + vfiles_params.source.name = vfiles_name + vfiles_params.source.name = vfiles_name + vfiles_params.source.operations = VirtualSourceOperations() + + if pre_refresh: + 
vfiles_params.source.operations.pre_refresh = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': pre_refresh }] + + if post_refresh: + vfiles_params.source.operations.post_refresh = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': post_refresh }] + + if pre_rollback: + vfiles_params.source.operations.pre_rollback = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': pre_rollback }] + + if post_rollback: + vfiles_params.source.operations.post_rollback = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': post_rollback }] + + if configure_clone: + vfiles_params.source.operations.configure_clone = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': configure_clone }] + + if arguments['--timestamp_type'] is None: + vfiles_params.timeflow_point_parameters = { + 'type': 'TimeflowPointSemantic', + 'container': container_obj.reference, + 'location': 'LATEST_POINT'} + + elif arguments['--timestamp_type'].upper() == 'SNAPSHOT': + + try: + dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session) + dx_snap_params = dx_timeflow_obj.set_timeflow_point( + container_obj, + arguments['--timestamp_type'], + arguments['--timestamp'], + arguments['--timeflow']) + + except RequestError as e: + raise DlpxException('Could not set the timeflow point:\n%s' + % (e)) + + if dx_snap_params.type == 'TimeflowPointSemantic': + vfiles_params.timeflow_point_parameters = {'type': + dx_snap_params.type, + 'container': + dx_snap_params.container, + 'location': + dx_snap_params.location} + + elif dx_snap_params.type == 'TimeflowPointTimestamp': + vfiles_params.timeflow_point_parameters = {'type': + dx_snap_params.type, + 'timeflow': + dx_snap_params.timeflow, + 'timestamp': + dx_snap_params.timestamp} + + print_info('%s: Provisioning %s\n' % (engine["hostname"], + vfiles_name)) + + try: + database.provision(dx_session_obj.server_session, vfiles_params) + + except (JobError, RequestError, HttpError) as e: + raise DlpxException('\nERROR: Could not provision the database:' + '\n%s' % (e)) + + + #Add the job into the jobs dictionary so we can track its progress + jobs[engine['hostname']] = dx_session_obj.server_session.last_job + + #return the job object to the calling statement so that we can tell if + # a job was created or not (will return None, if no job) + return dx_session_obj.server_session.last_job + else: + print_info('\nERROR %s: %s already exists. 
\n' % (engine['hostname'], + vfiles_name)) + return vfiles_obj.reference + + +def create_oracle_si_vdb(engine, jobs, vdb_name, vdb_group_obj, + environment_obj, container_obj, pre_refresh=None, + post_refresh=None, pre_rollback=None, + post_rollback=None, configure_clone=None): + + ''' + Create an Oracle SI VDB + ''' + + vdb_obj = None + + try: + vdb_obj = find_obj_by_name(dx_session_obj.server_session, database, + vdb_name) + except DlpxException: + pass + + if vdb_obj == None: + vdb_params = OracleProvisionParameters() + vdb_params.open_resetlogs = True + + if arguments['--noopen']: + vdb_params.open_resetlogs = False + + vdb_params.container = OracleDatabaseContainer() + vdb_params.container.group = vdb_group_obj.reference + vdb_params.container.name = vdb_name + vdb_params.source = OracleVirtualSource() + vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False + + if arguments['--instname']: + inst_name = arguments['--instname'] + elif arguments['--instname'] == None: + inst_name = vdb_name + + if arguments['--uniqname']: + unique_name = arguments['--uniqname'] + elif arguments['--uniqname'] == None: + unique_name = vdb_name + + if arguments['--db']: + db = arguments['--db'] + elif arguments['--db'] == None: + db = vdb_name + + vdb_params.source.mount_base = arguments['--mntpoint'] + + if arguments['--mapfile']: + vdb_params.source.file_mapping_rules = arguments['--mapfile'] + + if arguments['--template']: + template_obj = find_obj_by_name(dx_session_obj.server_session, + database.template, + arguments['--template']) + + vdb_params.source.config_template = template_obj.reference + + vdb_params.source_config = OracleSIConfig() + vdb_params.source.operations = VirtualSourceOperations() + + if pre_refresh: + vdb_params.source.operations.pre_refresh = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': pre_refresh }] + + if post_refresh: + vdb_params.source.operations.post_refresh = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': post_refresh }] + + if pre_rollback: + vdb_params.source.operations.pre_rollback = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': pre_rollback }] + + if post_rollback: + vdb_params.source.operations.post_rollback = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': post_rollback }] + + if configure_clone: + vdb_params.source.operations.configure_clone = [{ 'type': + 'RunCommandOnSourceOperation', + 'command': configure_clone }] + + vdb_repo = find_dbrepo_by_environment_ref_and_install_path(engine, + dx_session_obj.server_session, + 'OracleInstall', + environment_obj.reference, + arguments['--envinst']) + + vdb_params.source_config.database_name = db + vdb_params.source_config.unique_name = unique_name + vdb_params.source_config.instance = OracleInstance() + vdb_params.source_config.instance.instance_name = inst_name + vdb_params.source_config.instance.instance_number = 1 + vdb_params.source_config.repository = vdb_repo.reference + + dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session) + vdb_params.timeflow_point_parameters = \ + dx_timeflow_obj.set_timeflow_point(container_obj, + arguments['--timestamp_type'], + arguments['--timestamp']) + + print vdb_params, '\n\n\n' + print_info(engine["hostname"] + ": Provisioning " + vdb_name) + database.provision(dx_session_obj.server_session, vdb_params) + #Add the job into the jobs dictionary so we can track its progress + + jobs[engine['hostname']] = dx_session_obj.server_session.last_job + #return the job object to the calling statement so that we can tell if + # a job was 
created or not (will return None, if no job) + + return dx_session_obj.server_session.last_job + + else: + raise DlpxException('\nERROR: %s: %s alread exists\n' % + (engine['hostname'], vdb_name)) + + +def find_all_databases_by_group_name(engine, server, group_name, + exclude_js_container=False): + """ + Easy way to quickly find databases by group name + """ + + #First search groups for the name specified and return its reference + group_obj = find_obj_by_name(dx_session_obj.server_session, group, + group_name) + if group_obj: + databases=database.get_all(server, group=group_obj.reference, + no_js_container_data_source=exclude_js_container) + return databases + + +def find_database_by_name_and_group_name(engine, server, group_name, + database_name): + + databases = find_all_databases_by_group_name(engine, server, group_name) + + for each in databases: + if each.name == database_name: + print_debug('%s: Found a match %s' % (engine['hostname'], + str(each.reference))) + return each + + print_info('%s unable to find %s in %s' % (engine['hostname'], + database_name, group_name)) + + +def find_dbrepo_by_environment_ref_and_install_path(engine, server, + install_type, + f_environment_ref, + f_install_path): + ''' + Function to find database repository objects by environment reference and + install path, and return the object's reference as a string + You might use this function to find Oracle and PostGreSQL database repos. + ''' + print_debug('%s: Searching objects in the %s class for one with the ' + 'environment reference of %s and an install path of %s' % + (engine['hostname'], install_type, f_environment_ref, + f_install_path), debug) + + for obj in repository.get_all(server, environment=f_environment_ref): + if install_type == 'PgSQLInstall': + if (obj.type == install_type and + obj.installation_path == f_install_path): + print_debug('%s: Found a match %s' % (engine['hostname'], + str(obj.reference)), debug) + return obj + + elif install_type == 'OracleInstall': + if (obj.type == install_type and + obj.installation_home == f_install_path): + + print_debug('%s: Fount a match %s' % (engine['hostname'], + str(obj.reference)), debug) + return obj + else: + raise DlpxException('%s: No Repo match found for type %s.\n' % + (engine["hostname"], install_type)) + + +def find_repo_by_environment_ref(engine, repo_type, f_environment_ref, + f_install_path=None): + ''' + Function to find unstructured file repository objects by environment + reference and name, and return the object's reference as a string + You might use this function to find Unstructured File repos. 
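+    For example, create_vfiles_vdb() calls:
+        find_repo_by_environment_ref(engine, 'Unstructured Files',
+                                     environment_obj.reference)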
+ ''' + + print_debug('\n%s: Searching objects in the %s class for one with the' + 'environment reference of %s\n' % + (engine['hostname'], repo_type, f_environment_ref), debug) + + obj_ref = '' + all_objs = repository.get_all(dx_session_obj.server_session, + environment=f_environment_ref) + + for obj in all_objs: + if obj.name == repo_type: + print_debug(engine['hostname'] + ': Found a match ' + + str(obj.reference)) + return obj + + elif obj.type == repo_type: + print_debug('%s Found a match %s' % (engine['hostname'], + str(obj.reference)), debug) + return obj + + raise DlpxException('%s: No Repo match found for type %s\n' % ( + engine['hostname'], repo_type)) + + +def find_dbrepo_by_environment_ref_and_name(engine, repo_type, + f_environment_ref, f_name): + ''' + Function to find database repository objects by environment reference and + name, and return the object's reference as a string + You might use this function to find MSSQL database repos. + ''' + + print_debug('%s: Searching objects in the %s class for one with the ' + 'environment reference of %s and a name of %s.' % + (engine['hostname'], repo_type, f_environment_ref, f_name), + debug) + + obj_ref = '' + all_objs = repository.get_all(server, environment=f_environment_ref) + + for obj in all_objs: + if (repo_type == 'MSSqlInstance' or repo_type == 'ASEInstance'): + if (obj.type == repo_type and obj.name == f_name): + print_debug('%s: Found a match %s' % (engine['hostname'], + str(obj.reference)), debug) + return obj + + elif repo_type == 'Unstructured Files': + if obj.value == install_type: + print_debug('%s: Found a match %s' % (engine['hostname'], + str(obj.reference)), debug) + return obj + + raise DlpxException('%s: No Repo match found for type %s\n' % + (engine['hostname'], repo_type)) + + +def find_snapshot_by_database_and_name(engine, database_obj, snap_name): + """ + Find snapshots by database and name. Return snapshot reference. + + engine: Dictionary of engines from config file. + database_obj: Database object to find the snapshot against + snap_name: Name of the snapshot + """ + + snapshots = snapshot.get_all(dx_session_obj.server_session, + database=database_obj.reference) + matches = [] + for snapshot_obj in snapshots: + if str(snapshot_obj.name).startswith(arguments['--timestamp']): + matches.append(snapshot_obj) + + for each in matches: + print_debug(each.name, debug) + + if len(matches) == 1: + print_debug('%s: Found one and only one match. This is good.\n %s' % + (engine['hostname'], matches[0]), debug) + return matches[0] + + elif len(matches) > 1: + raise DlpxException('%s: The name specified was not specific enough.' + ' More than one match found.\n' % + (engine['hostname'],)) + + else: + raise DlpxException('%s: No matches found for the time specified.\n' + % (engine['hostname'])) + + +def find_snapshot_by_database_and_time(engine, database_obj, snap_time): + snapshots = snapshot.get_all(dx_session_obj.server_session, + database=database_obj.reference) + matches = [] + + for snapshot_obj in snapshots: + if str(snapshot_obj.latest_change_point.timestamp).startswith(arguments['--timestamp']): + + matches.append(snapshot_obj) + + if len(matches) == 1: + print_debug('%s": Found one and only one match. This is good.\n%s' % + (engine['hostname'], matches[0]), debug) + + return matches[0] + + elif len(matches) > 1: + print_debug(matches, debug) + + raise DlpxException('%s: The time specified was not specific enough.' 
+ 'More than one match found.\n' % + (engine['hostname'])) + else: + raise DlpxException('%s: No matches found for the time specified.\n' + % (engine['hostname'])) + + +def find_source_by_database(engine, database_obj): + #The source tells us if the database is enabled/disables, virtual, + # vdb/dSource, or is a staging database. + source_obj = source.get_all(server, database=database_obj.reference) + + #We'll just do a little sanity check here to ensure we only have a 1:1 + # result. + if len(source_obj) == 0: + raise DlpxException('%s: Did not find a source for %s. Exiting.\n' % + (engine['hostname'], database_obj.name)) + + elif len(source_obj) > 1: + raise DlpxException('%s: More than one source returned for %s. ' + 'Exiting.\n' % (engine['hostname'], + database_obj.name + ". Exiting")) + return source_obj + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary containing engine information + """ + + #Establish these variables as empty for use later + environment_obj = None + source_objs = None + jobs = {} + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + group_obj = find_obj_by_name(dx_session_obj.server_session, group, + arguments['--target_grp']) + + #Get the reference of the target environment. 
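+        # host_name holds the --environment value captured in main();
+        # find_obj_by_name() is expected to raise DlpxException when no
+        # matching environment exists, which the except block below handles.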
+ print_debug('Getting environment for %s\n' % (host_name), debug) + + #Get the environment object by the hostname + environment_obj = find_obj_by_name(dx_session_obj.server_session, + environment, host_name) + + except DlpxException as e: + print('\nERROR: Engine %s encountered an error while provisioning ' + '%s:\n%s\n' % (engine['hostname'], arguments['--target'], e)) + sys.exit(1) + + print_debug('Getting database information for %s\n' % + (arguments['--source']), debug) + try: + #Get the database reference we are copying from the database name + database_obj = find_obj_by_name(dx_session_obj.server_session, + database, arguments['--source']) + except DlpxException: + return + + thingstodo = ["thingtodo"] + #reset the running job count before we begin + i = 0 + + try: + with dx_session_obj.job_mode(single_thread): + while (len(jobs) > 0 or len(thingstodo) > 0): + arg_type = arguments['--type'].lower() + if len(thingstodo)> 0: + + if arg_type == "oracle": + create_oracle_si_vdb(engine, jobs, database_name, + group_obj, environment_obj, + database_obj, + arguments['--prerefresh'], + arguments['--postrefresh'], + arguments['--prerollback'], + arguments['--postrollback'], + arguments['--configure-clone']) + + elif arg_type == "ase": + create_ase_vdb(engine, server, jobs, group_obj, + database_name, environment_obj, + database_obj) + + elif arg_type == "mssql": + create_mssql_vdb(engine, jobs, group_obj, + database_name, environment_obj, + database_obj) + + elif arg_type == "vfiles": + create_vfiles_vdb(engine, jobs, group_obj, + database_name, environment_obj, + database_obj, + arguments['--prerefresh'], + arguments['--postrefresh'], + arguments['--prerollback'], + arguments['--postrollback'], + arguments['--configure-clone']) + + thingstodo.pop() + + #get all the jobs, then inspect them + i = 0 + for j in jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, jobs[j]) + print_debug(job_obj, debug) + print_info(engine["hostname"] + ": VDB Provision: " + + job_obj.job_state) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from + # the running jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the + # running job count. + i += 1 + + print_info('%s: %s jobs running.' % (engine['hostname'], + str(i))) + + #If we have running jobs, pause before repeating the checks. + if len(jobs) > 0: + sleep(float(arguments['--poll'])) + + except (DlpxException, JobError) as e: + print '\nError while provisioning %s:\n%s' % (database_name, e.message) + sys.exit(1) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + + No arguments required for run_job(). + """ + #Create an empty list to store threads we create. + threads = [] + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. 
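+                # main_workflow() is wrapped by @run_async, so calling it
+                # returns an already-started threading.Thread handle; the
+                # handles collected here are joined at the end of run_job().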
+ threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in main_workflow:\n%s' % (e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: %s\n' % + (arguments['--engine'])) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine %s cannot be ' + 'found in %s. Please check your value ' + 'and try again. Exiting.\n' % ( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: %s' % ( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def serversess(f_engine_address, f_engine_username, f_engine_password): + """ + Function to setup the session with the Delphix Engine + """ + server_session= DelphixEngine(f_engine_address, f_engine_username, + f_engine_password, "DOMAIN") + return server_session + + +def set_exit_handler(func): + """ + This function helps us set the correct exit code + """ + signal.signal(signal.SIGTERM, func) + + +def set_timeflow_point(engine, server, container_obj): + """ + This returns the reference of the timestamp specified. + """ + + if arguments['--timestamp_type'].upper() == "SNAPSHOT": + if arguments['--timestamp'].upper() == "LATEST": + print_debug('%s: Using the latest Snapshot.' 
% + (engine['hostname']), debug) + + timeflow_point_parameters = TimeflowPointSemantic() + timeflow_point_parameters.container = container_obj.reference + timeflow_point_parameters.location = "LATEST_SNAPSHOT" + + elif arguments['--timestamp'].startswith("@"): + print_debug('%s: Using a named snapshot' % (engine['hostname']), + debug) + + snapshot_obj = find_snapshot_by_database_and_name(engine, server, + container_obj, + arguments['--timestamp']) + + if snapshot_obj != None: + timeflow_point_parameters=TimeflowPointLocation() + timeflow_point_parameters.timeflow = snapshot_obj.timeflow + timeflow_point_parameters.location = \ + snapshot_obj.latest_change_point.location + + else: + raise DlpxException('%s: Was unable to use the specified ' + 'snapshot %s for database %s\n' % + (engine['hostname'], + arguments['--timestamp'], + container_obj.name)) + + else: + print_debug('%s: Using a time-designated snapshot' % + (engine['hostname']), debug) + + snapshot_obj = find_snapshot_by_database_and_time(engine, server, + container_obj, + arguments['--timestamp']) + if snapshot_obj != None: + timeflow_point_parameters=TimeflowPointTimestamp() + timeflow_point_parameters.timeflow = snapshot_obj.timeflow + timeflow_point_parameters.timestamp = \ + snapshot_obj.latest_change_point.timestamp + else: + raise DlpxException('%s: Was unable to find a suitable time ' + ' for %s for database %s.\n' % + (engine['hostname'], + arguments['--timestamp'], + container_obj.name)) + + elif arguments['--timestamp_type'].upper() == "TIME": + if arguments['--timestamp'].upper() == "LATEST": + timeflow_point_parameters = TimeflowPointSemantic() + timeflow_point_parameters.location = "LATEST_POINT" + else: + raise DlpxException('%s: Only support a --timestamp value of ' + '"latest" when used with timestamp_type ' + 'of time' %s (engine['hostname'])) + + else: + raise DlpxException('%s is not a valied timestamp_type. Exiting\n' % + (arguments['--timestamp_type'])) + + timeflow_point_parameters.container = container_obj.reference + return timeflow_point_parameters + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + +def update_jobs_dictionary(engine, server, jobs): + """ + This function checks each job in the dictionary and updates its status or + removes it if the job is complete. + Return the number of jobs still running. + """ + #Establish the running jobs counter, as we are about to update the count + # from the jobs report. + i = 0 + #get all the jobs, then inspect them + for j in jobs.keys(): + job_obj = job.get(server, jobs[j]) + print_debug('%s: %s' % (engine['hostname'], str(job_obj)), debug) + print_info('%s: %s: %s' % (engine['hostname'], j.name, + job_obj.job_state)) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the running + # jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running job count. + i += 1 + return i + + +def main(argv): + #We want to be able to call on these variables anywhere in the script. 
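+    # In particular, dx_session_obj, database_name, host_name and debug are
+    # read by the @run_async worker threads (main_workflow) and by the
+    # create_* helpers above, so they are shared as module-level globals
+    # rather than passed around explicitly.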
+ global single_thread + global usebackup + global time_start + global config_file_path + global database_name + global host_name + global dx_session_obj + global debug + + try: + dx_session_obj = GetSession() + debug = arguments['--debug'] + logging_est(arguments['--logdir'], debug) + print_debug(arguments, debug) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + + print_info('Welcome to %s version %s' % (basename(__file__), + VERSION)) + + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + + database_name = arguments['--target'] + host_name = arguments['--environment'] + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job() + + elapsed_minutes = time_elapsed() + print_info('script took %s minutes to get this far. ' % + (str(elapsed_minutes))) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except DlpxException as e: + """ + We use this exception handler when an error occurs in a function call. + """ + + print('\nERROR: Please check the ERROR message below:\n%s' % + (e.message)) + sys.exit(2) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print('\nERROR: Connection failed to the Delphix Engine. Please ' + 'check the ERROR message below:\n%s' % (e.message)) + sys.exit(2) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so + that we have actionable data + """ + print 'A job failed in the Delphix Engine:\n%s' (e.job) + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far' % (basename(__file__), + str(elapsed_minutes))) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug('You sent a CTRL+C to interrupt the process') + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far' % (basename(__file__), + str(elapsed_minutes))) + + except: + """ + Everything else gets caught here + """ + print(sys.exc_info()[0]) + print(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far' % (basename(__file__), + str(elapsed_minutes))) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_refresh_db.py b/v1_8_2/dx_refresh_db.py new file mode 100755 index 0000000..e6fcf26 --- /dev/null +++ b/v1_8_2/dx_refresh_db.py @@ -0,0 +1,905 @@ +#!/usr/bin/env python +#Adam Bowen - Apr 2016 +#This script refreshes a vdb +# Updated by Corey Brune Oct 2016 +#requirements +#pip install --upgrade setuptools pip docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. This thing is brilliant. 
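+#
+# High-level flow, as implemented below: docopt parses the usage text,
+# get_config() loads dxtools.conf into a dictionary of engines, and
+# main_workflow() resolves the VDBs to act on before calling
+# database.refresh() with RefreshParameters (or OracleRefreshParameters
+# for Oracle VDBs).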
+"""Refresh a vdb +Usage: + dx_refresh_db.py (--name | --dsource | --all_vdbs [--group_name ]| --host | --list_timeflows | --list_snapshots) + [--timestamp_type ] + [--timestamp --timeflow ] + [-d | --engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_refresh_db.py -h | --help | -v | --version +Refresh a Delphix VDB +Examples: + dx_refresh_db.py --name "aseTest" --group_name "Analytics" + dx_refresh_db.py --dsource "dlpxdb1" + dx_refresh_db.py --all_vdbs --host LINUXSOURCE --parallel 4 --debug -d landsharkengine + dx_refresh_db.py --all_vdbs --group_name "Analytics" --all +Options: + --name Name of the object you are refreshing. + --all_vdbs Refresh all VDBs that meet the filter criteria. + --dsource Name of dsource in Delphix to execute against. + --group_name Name of the group to execute against. + --list_timeflows List all timeflows + --list_snapshots List all snapshots + --host Name of environment in Delphix to execute against. + --timestamp_type The type of timestamp you are specifying. + Acceptable Values: TIME, SNAPSHOT + [default: SNAPSHOT] + --timestamp + The Delphix semantic for the point in time on + the source from which you want to refresh your VDB. + Formats: + latest point in time or snapshot: LATEST + point in time: "YYYY-MM-DD HH24:MI:SS" + snapshot name: "@YYYY-MM-DDTHH24:MI:SS.ZZZ" + snapshot time from GUI: "YYYY-MM-DD HH24:MI" + [default: LATEST] + --timeflow Name of the timeflow to refresh a VDB + -d Identifier of Delphix engine in dxtools.conf. + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_refresh_db.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.1.615' + + +from docopt import docopt +import logging +from os.path import basename +import sys +import traceback +import json +from time import sleep, time + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy import job_context +from delphixpy.web import database +from delphixpy.web import environment +from delphixpy.web import group +from delphixpy.web import job +from delphixpy.web import source +from delphixpy.web import timeflow +from delphixpy.web.snapshot import snapshot +from delphixpy.web.vo import OracleRefreshParameters +from delphixpy.web.vo import RefreshParameters +from delphixpy.web.vo import TimeflowPointLocation +from delphixpy.web.vo import TimeflowPointSemantic +from delphixpy.web.vo import TimeflowPointTimestamp + +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_obj_by_name +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug +from lib.DxLogging import print_exception + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). 
+ Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +def find_all_databases_by_dsource_name(engine, server, dsource_name, + exclude_js_container=True): + """ + Easy way to quickly find databases by dSource + """ + + #First search for the dSource name specified and return its reference + dsource_obj = find_obj_by_name(engine, server, database, dsource_name) + + if dsource_obj: + return(database.get_all(server, + provision_container=dsource_obj.reference, + no_js_container_data_source=exclude_js_container)) + + +def find_all_databases_by_group_name(engine, server, group_name, + exclude_js_container=True): + """ + Easy way to quickly find databases by group name + """ + + #First search groups for the name specified and return its reference + group_obj = find_obj_by_name(engine, server, group, group_name) + if group_obj: + return(database.get_all(server, group=group_obj.reference, + no_js_container_data_source=exclude_js_container)) + + +def find_database_by_name_and_group_name(engine, server, group_name, + database_name): + + databases = find_all_databases_by_group_name(engine, server, group_name) + + for each in databases: + if each.name == database_name: + print_debug(engine["hostname"] + ": Found a match " + + str(each.reference)) + return each + + print_info(engine["hostname"] + ": Unable to find \"" + + database_name + "\" in " + group_name) + + +def find_snapshot_by_database_and_name(engine, server, database_obj, snap_name): + snapshots = snapshot.get_all(server, database=database_obj.reference) + matches = [] + for snapshot_obj in snapshots: + if str(snapshot_obj.name).startswith(arguments['--timestamp']): + matches.append(snapshot_obj) + + if len(matches) == 1: + + print_debug(engine["hostname"] + + ": Found one and only one match. This is good.") + print_debug(engine["hostname"] + ": " + matches[0]) + + return matches[0] + + elif len(matches) > 1: + print_error("The name specified was not specific enough. " + "More than one match found.") + + for each in matches: + print_debug(engine["hostname"] + ": " + each.name) + else: + print_error("No matches found for the time specified") + print_error("No matching snapshot found") + + +def find_snapshot_by_database_and_time(engine, server, database_obj, snap_time): + """ + Find snapshot object by database name and timetamp + engine: + server: A Delphix engine object. + database_obj: The database reference to retrieve the snapshot + snap_time: timstamp of the snapshot + """ + snapshots = snapshot.get_all(server, database=database_obj.reference) + matches = [] + + for snapshot_obj in snapshots: + if str(snapshot_obj.latest_change_point.timestamp) == snap_time \ + or str(snapshot_obj.first_change_point.timestamp) == snap_time: + + matches.append(snapshot_obj) + + if len(matches) == 1: + snap_match = get_obj_name(server, database, matches[0].container) + print_debug(engine['hostname'] + + ': Found one and only one match. This is good.') + print_debug(engine['hostname'] + ': ' + snap_match) + + + return matches[0] + + elif len(matches) > 1: + print_debug(engine["hostname"] + ": " + matches) + raise DlpxException('The time specified was not specific enough.' 
+ ' More than one match found.\n') + else: + raise DlpxException('No matches found for the time specified.\n') + + +def find_source_by_database(engine, server, database_obj): + #The source tells us if the database is enabled/disables, virtual, + # vdb/dSource, or is a staging database. + source_obj = source.get_all(server, database=database_obj.reference) + + #We'll just do a little sanity check here to ensure we only have a + # 1:1 result. + if len(source_obj) == 0: + print_error(engine["hostname"] + ": Did not find a source for " + + database_obj.name + ". Exiting") + sys.exit(1) + + elif len(source_obj) > 1: + print_error(engine["hostname"] + + ": More than one source returned for " + + database_obj.name + ". Exiting") + print_error(source_obj) + sys.exit(1) + + return source_obj + + +def get_config(config_file_path): + """ + This function reads in the dxtools.conf file + """ + #First test to see that the file is there and we can open it + try: + config_file = open(config_file_path).read() + except: + print_error("Was unable to open " + config_file_path + + ". Please check the path and permissions, then try again.") + sys.exit(1) + + #Now parse the file contents as json and turn them into a + # python dictionary, throw an error if it isn't proper json + try: + config = json.loads(config_file) + except: + print_error("Was unable to read " + config_file_path + + " as json. Please check file in a json formatter and " \ + "try again.") + sys.exit(1) + + #Create a dictionary of engines (removing the data node from the + # dxtools.json, for easier parsing) + delphix_engines = {} + for each in config['data']: + delphix_engines[each['hostname']] = each + + print_debug(delphix_engines) + return delphix_engines + + +def job_mode(server): + """ + This function tells Delphix how to execute jobs, based on the + single_thread variable at the beginning of the file + """ + #Synchronously (one at a time) + if single_thread == True: + job_m = job_context.sync(server) + print_debug("These jobs will be executed synchronously") + #Or asynchronously + else: + job_m = job_context.async(server) + print_debug("These jobs will be executed asynchronously") + return job_m + + +def job_wait(): + """ + This job stops all work in the thread/process until all jobs on the + engine are completed. + """ + #Grab all the jos on the server (the last 25, be default) + all_jobs = job.get_all(server) + #For each job in the list, check to see if it is running (not ended) + for jobobj in all_jobs: + if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): + print_debug("Waiting for " + jobobj.reference + " (currently: " + + jobobj.job_state + + ") to finish running against the container") + + #If so, wait + job_context.wait(server,jobobj.reference) + + +def get_obj_name(server, f_object, obj_reference): + """ + Return the object name from obj_reference + + engine: A Delphix engine object. 
+ obj_reference: The object reference to retrieve the name + """ + + try: + obj_name = f_object.get(server, obj_reference) + return(obj_name.name) + + except RequestError as e: + raise dlpxExceptionHandler(e) + + except HttpError as e: + raise DlpxException(e) + + +def list_snapshots(server): + """ + List all snapshots with timestamps + """ + + header = 'Snapshot Name, First Change Point, Location, Latest Change Point' + snapshots = snapshot.get_all(server) + + print header + for snap in snapshots: + container_name = get_obj_name(server, database, snap.container) + snap_range = snapshot.timeflow_range(server, snap.reference) + + print '{}, {}, {}, {}, {}'.format(str(snap.name), + container_name, + snap_range.start_point.timestamp, + snap_range.start_point.location, + snap_range.end_point.timestamp) + + +@run_async +def main_workflow(engine): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + """ + + #Pull out the values from the dictionary for this engine + engine_address = engine["ip_address"] + engine_username = engine["username"] + engine_password = engine["password"] + #Establish these variables as empty for use later + databases = [] + environment_obj = None + source_objs = None + jobs = {} + + + #Setup the connection to the Delphix Engine + server = serversess(engine_address, engine_username, engine_password) + + #If an environment/server was specified + if host_name: + print_debug(engine["hostname"] + ": Getting environment for " + + host_name) + #Get the environment object by the hostname + environment_obj = find_obj_by_name(engine, server, environment, + host_name) + + if environment_obj != None: + #Get all the sources running on the server + env_source_objs = source.get_all(server, + environment=environment_obj.reference) + + #If the server doesn't have any objects, exit. + if env_source_objs == None: + print_error(host_name + "does not have any objects. Exiting") + sys.exit(1) + + #If we are only filtering by the server, then put those objects in + # the main list for processing + if not(arguments['--group_name'] and database_name): + source_objs = env_source_objs + all_dbs = database.get_all(server, + no_js_container_data_source=True) + databases = [] + for source_obj in source_objs: + if source_obj.staging == False and \ + source_obj.virtual == True: + + database_obj = database.get(server, + source_obj.container) + + if database_obj in all_dbs: + databases.append(database_obj) + else: + print_error(engine["hostname"] + ":No environment found for " + + host_name + ". Exiting") + sys.exit(1) + + #If we specified a specific database by name.... + if arguments['--name']: + #Get the database object from the name + + database_obj = find_obj_by_name(engine, server, database, + arguments['--name']) + if database_obj: + databases.append(database_obj) + + #Else if we specified a group to filter by.... + elif arguments['--group_name']: + print_debug(engine["hostname"] + ":Getting databases in group " + + arguments['--group_name']) + #Get all the database objects in a group. + databases = find_all_databases_by_group_name(engine, server, + arguments['--group_name']) + + #Else if we specified a dSource to filter by.... + elif arguments['--dsource']: + print_debug(engine["hostname"] + ":Getting databases for dSource" + + arguments['--dsource']) + + #Get all the database objects in a group. 
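+            # find_all_databases_by_dsource_name() returns every VDB whose
+            # provision_container is the named dSource.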
+ databases = find_all_databases_by_dsource_name(engine, server, + arguments['--dsource']) + + #Else, if we said all vdbs ... + elif arguments['--all_vdbs'] and not arguments['--host'] : + print_debug(engine['hostname'] + ':Getting all VDBs ') + + #Grab all databases, but filter out the database that are in JetStream + #containers, because we can't refresh those this way. + databases = database.get_all(server, no_js_container_data_source=True) + + elif arguments['--list_timeflows']: + list_timeflows(server) + + elif arguments['--list_snapshots']: + list_snapshots(server) + + #reset the running job count before we begin + i = 0 + with job_mode(server): + #While there are still running jobs or databases still to process.... + + while (len(jobs) > 0 or len(databases) > 0): + + #While there are databases still to process and we are still under + #the max simultaneous jobs threshold (if specified) + while len(databases) > 0 and (arguments['--parallel'] == None or \ + i < int(arguments['--parallel'])): + + #Give us the next database in the list, and then remove it + database_obj = databases.pop() + #Get the source of the database. + source_obj = find_source_by_database(engine, server, + database_obj) + + #If we applied the environment/server filter AND group filter, + # find the intersecting matches + if environment_obj != None and (arguments['--group_name']): + match = False + + for env_source_obj in env_source_objs: + if source_obj[0].reference in env_source_obj.reference: + match = True + break + if match == False: + print_error(engine["hostname"] + ": " + + database_obj.name + " does not exist on " + + host_name + ". Exiting") + return + + #Refresh the database + refresh_job = refresh_database(engine, server, jobs, + source_obj[0], database_obj) + #If refresh_job has any value, then we know that a job was + # initiated. + + if refresh_job: + #increment the running job count + i += 1 + #Check to see if we are running at max parallel processes, and + # report if so. + if ( arguments['--parallel'] != None and \ + i >= int(arguments['--parallel'])): + + print_info(engine["hostname"] + ": Max jobs reached (" + + str(i) + ")") + + i = update_jobs_dictionary(engine, server, jobs) + print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + + str(len(databases)) + " jobs waiting to run") + + #If we have running jobs, pause before repeating the checks. + if len(jobs) > 0: + sleep(float(arguments['--poll'])) + +def print_error(print_obj): + """ + Call this function with a log message to prefix the message with ERROR + """ + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + + +def print_warning(print_obj): + """ + Call this function with a log message to prefix the message with WARNING + """ + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + + +def refresh_database(engine, server, jobs, source_obj, container_obj): + """ + This function actually performs the refresh + engine: + server: Engine object + jobs: list containing running jobs + source_obj: source object used to refresh from snapshot or timeflow + container_obj: VDB container + """ + + #Sanity check to make sure our source object has a reference + if source_obj.reference: + #We can only refresh VDB's + if source_obj.virtual != True: + print_warning(engine["hostname"] + ": " + container_obj.name + + " is not a virtual object. Skipping.") + + #Ensure this source is not a staging database. We can't act upon those. 
+ elif source_obj.staging == True: + print_warning(engine["hostname"] + ": " + container_obj.name + + " is a staging database. Skipping.") + + #Ensure the source is enabled. We can't refresh disabled databases. + elif source_obj.runtime.enabled == "ENABLED" : + source_db = database.get(server, container_obj.provision_container) + if not source_db: + print_error(engine["hostname"] + + ":Was unable to retrieve the source container for " + + container_obj.name) + print_info(engine["hostname"] + ": Refreshing " + + container_obj.name + " from " + source_db.name) + print_debug(engine["hostname"] + ": Type: " + source_obj.type ) + print_debug(engine["hostname"] + ":" + source_obj.type) + + #If the vdb is a Oracle type, we need to use a + # OracleRefreshParameters + + if str(container_obj.reference).startswith("ORACLE"): + refresh_params = OracleRefreshParameters() + else: + refresh_params = RefreshParameters() + + try: + refresh_params.timeflow_point_parameters = set_timeflow_point( + engine, server, + source_db) + print_debug(engine["hostname"] + ":" + str(refresh_params)) + + #Sync it + database.refresh(server, container_obj.reference, + refresh_params) + jobs[container_obj] = server.last_job + + except RequestError as e: + print '\nERROR: Could not set timeflow point:\n%s\n' % ( + e.message.action) + sys.exit(1) + + except DlpxException as e: + print 'ERROR: Could not set timeflow point:\n%s\n' % (e.message) + sys.exit(1) + + + #return the job object to the calling statement so that we can + # tell if a job was created or not (will return None, if no job) + return server.last_job + + #Don't do anything if the database is disabled + else: + print_warning(engine["hostname"] + ": " + container_obj.name + + " is not enabled. Skipping sync") + + +def run_job(engine): + """ + This function runs the main_workflow aynchronously against all the + servers specified + """ + + #Create an empty list to store threads we create. + threads = [] + #If the --all argument was given, run against every engine in dxtools.conf + + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + #For each server in the dxtools.conf... + for delphix_engine in dxtools_objects: + engine = dxtools_objects[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + else: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dxtools_objects[arguments['--engine']] + print_info("Executing against Delphix Engine: " + + arguments['--engine']) + except: + print_error("Delphix Engine \"" + arguments['--engine'] + "\" \ + cannot be found in " + config_file_path) + print_error("Please check your value and try again. Exiting") + sys.exit(1) + + #Else if the -d argument was given, test to see if the engine exists + # in dxtools.conf + elif arguments['-d']: + try: + engine = dxtools_objects[arguments['-d']] + print_info("Executing against Delphix Engine: " + + arguments['-d']) + except: + print_error("Delphix Engine \"" + arguments['-d'] + + "\" cannot be found in " + config_file_path) + print_error("Please check your value and try again. 
Exiting") + sys.exit(1) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dxtools_objects: + if dxtools_objects[delphix_engine]['default'] == 'true': + engine = dxtools_objects[delphix_engine] + print_info("Executing against the default Delphix Engine" \ + " in the dxtools.conf: " + + dxtools_objects[delphix_engine]['hostname']) + break + if engine == None: + print_error("No default engine found. Exiting") + sys.exit(1) + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def serversess(f_engine_address, f_engine_username, f_engine_password): + """ + Function to setup the session with the Delphix Engine + """ + server_session= DelphixEngine(f_engine_address, f_engine_username, + f_engine_password, "DOMAIN") + return server_session + + +def list_timeflows(server): + """ + Retrieve and print all timeflows for a given engine + """ + + ret_timeflow_dct = {} + all_timeflows = timeflow.get_all(server) + + print 'DB Name, Timeflow Name, Timestamp' + + for tfbm_lst in all_timeflows: + try: + + db_name = get_obj_name(server, database, tfbm_lst.container) + print '%s, %s, %s\n' % (str(db_name), str(tfbm_lst.name), + str(tfbm_lst.parent_point.timestamp)) + + except AttributeError: + print '%s, %s\n' % (str(tfbm_lst.name), str(db_name)) + + except TypeError as e: + raise DlpxException('Listing Timeflows encountered an error:\n%s' % + (e.message)) + + except RequestError, e: + dlpx_err = e.message + raise DlpxException(dlpx_err.action) + + +def set_timeflow_point(engine, server, container_obj): + """ + This returns the reference of the timestamp specified. 
+ engine: + server: Delphix Engine object + container_obj: VDB object + """ + + if arguments['--timestamp_type'].upper() == "SNAPSHOT": + if arguments['--timestamp'].upper() == "LATEST": + print_debug(engine["hostname"] + ": Using the latest Snapshot") + timeflow_point_parameters = TimeflowPointSemantic() + timeflow_point_parameters.location = "LATEST_SNAPSHOT" + + elif arguments['--timestamp'].startswith("@"): + print_debug(engine["hostname"] + ": Using a named snapshot") + snapshot_obj = find_snapshot_by_database_and_name(engine, server, + container_obj, + arguments['--timestamp']) + + if snapshot_obj: + timeflow_point_parameters=TimeflowPointLocation() + timeflow_point_parameters.timeflow = snapshot_obj.timeflow + timeflow_point_parameters.location = \ + snapshot_obj.latest_change_point.location + + else: + raise DlpxException('ERROR: Was unable to use the specified ' + 'snapshot %s for database %s.\n' % + (arguments['--timestamp'], + container_obj.name)) + + elif arguments['--timestamp']: + print_debug(engine["hostname"] + + ": Using a time-designated snapshot") + snapshot_obj = find_snapshot_by_database_and_time( + engine, server, container_obj, + arguments['--timestamp']) + + if snapshot_obj: + timeflow_point_parameters=TimeflowPointTimestamp() + timeflow_point_parameters.timeflow = snapshot_obj.timeflow + timeflow_point_parameters.timestamp = \ + snapshot_obj.latest_change_point.timestamp + + else: + raise DlpxException('Was unable to find a suitable time' + ' for %s for database %s' % + (arguments['--timestamp'], + container_obj.name)) + + elif arguments['--timestamp_type'].upper() == "TIME": + + if arguments['--timestamp'].upper() == "LATEST": + timeflow_point_parameters = TimeflowPointSemantic() + timeflow_point_parameters.location = "LATEST_POINT" + + elif arguments['--timestamp']: + timeflow_point_parameters = TimeflowPointTimestamp() + timeflow_point_parameters.type = 'TimeflowPointTimestamp' + timeflow_obj = find_obj_by_name(engine, server, timeflow, + arguments['--timeflow']) + + timeflow_point_parameters.timeflow = timeflow_obj.reference + timeflow_point_parameters.timestamp = arguments['--timestamp'] + return timeflow_point_parameters + else: + raise DlpxException(arguments['--timestamp_type'] + + " is not a valied timestamp_type. Exiting") + + timeflow_point_parameters.container = container_obj.reference + return timeflow_point_parameters + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + + +def update_jobs_dictionary(engine, server, jobs): + """ + This function checks each job in the dictionary and updates its status or + removes it if the job is complete. + Return the number of jobs still running. + """ + #Establish the running jobs counter, as we are about to update the count + # from the jobs report. + i = 0 + #get all the jobs, then inspect them + for j in jobs.keys(): + job_obj = job.get(server, jobs[j]) + print_debug(engine["hostname"] + ": " + str(job_obj)) + print_info(engine["hostname"] + ": " + j.name + ": " + + job_obj.job_state) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the running + # jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running job count. 
+ i += 1 + return i + + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global host_name + global database_name + global config_file_path + global dxtools_objects + + try: + #Declare globals that will be used throughout the script. + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + database_name = arguments['--name'] + host_name = arguments['--host'] + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dxtools_objects = get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job(engine) + + elapsed_minutes = time_elapsed() + print_info("script took " + str(elapsed_minutes) + + " minutes to get this far.") + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that we + have actionable data + """ + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + + " minutes to get this far.") + sys.exit(3) + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + + " minutes to get this far.") + except: + """ + Everything else gets caught here + """ + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + + " minutes to get this far.") + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_replication.py b/v1_8_2/dx_replication.py new file mode 100755 index 0000000..f6c7def --- /dev/null +++ b/v1_8_2/dx_replication.py @@ -0,0 +1,421 @@ +#!/usr/bin/env python +# Corey Brune - Feb 2017 +#Description: +# This script will setup replication between two hosts. +# +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""Description +Usage: + dx_replication.py --rep_name --target_host --target_user --target_pw --rep_objs [--schedule --bandwidth --num_cons --enabled] + dx_replication.py --delete + dx_replication.py --execute + dx_replication.py --list + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + + dx_replication.py -h | --help | -v | --version + +Description +Setup replication between two hosts. 
+Examples: +dx_replication.py --rep_name mytest --target_host 172.16.169.141 --target_user delphix_admin --target_pw delphix --rep_objs mytest1 --schedule '55 0 19 * * ?' --enabled +dx_replication.py --rep_name mytest --target_host 172.16.169.141 --target_user delphix_admin --target_pw delphix --rep_objs mytest1 --schedule '0 40 20 */4 * ?' --bandwidth 5 --num_cons 2 --enabled + +dx_replication.py --delete mytest + +Options: + --rep_name Name of the replication job. + --target_host Name / IP of the target replication host. + --target_user Username for the replication target host. + --target_pw Password for the user. + --schedule Schedule of the replication job in crontab format. (seconds, minutes, hours, day of month, month) + [default: '0 0 0 */5 * ?'] + --rep_objs Comma delimited list of objects to replicate. + --delete Name of the replication job to delete. + --bandwidth Limit bandwidth to MB/s. + --num_cons Number of network connections for the replication job. + --list List all of the replication jobs. + --execute Name of the replication job to execute. + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.0.001' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import job +from delphixpy.web import database +from delphixpy.web.replication import spec +from delphixpy.web.vo import ReplicationSpec +from delphixpy.web.vo import ReplicationList + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_obj_specs +from lib.GetSession import GetSession + + +def create_replication_job(): + """ + Create a replication job + :return: Reference to the spec object + """ + rep_spec = ReplicationSpec() + rep_spec.name = arguments['--rep_name'] + rep_spec.target_host = arguments['--target_host'] + rep_spec.target_principal = arguments['--target_user'] + rep_spec.target_credential = {'type': 'PasswordCredential', 'password': + arguments['--target_pw']} + rep_spec.object_specification = ReplicationList() + rep_spec.schedule = arguments['--schedule'] + rep_spec.encrypted = True + + if arguments['--num_cons']: + rep_spec.number_of_connections = int(arguments['--num_cons']) + if arguments['--bandwidth']: + rep_spec.bandwidth_limit = int(arguments['--bandwidth']) + if arguments['--enabled']: + rep_spec.enabled = True + try: + rep_spec.object_specification.objects = find_obj_specs( + dx_session_obj.server_session, arguments['--rep_objs'].split(',')) + + ref = spec.create(dx_session_obj.server_session, rep_spec) + if dx_session_obj.server_session.last_job: + dx_session_obj.jobs[dx_session_obj.server_session.address] = \ + dx_session_obj.server_session.last_job + print_info('Successfully created {} with reference ' + 
'{}\n'.format(arguments['--rep_name'], ref)) + + except (HttpError, RequestError, DlpxException) as e: + print_exception('Could not create replication job {}:\n{}'.format( + arguments['--rep_name'], e)) + + +def delete_replication_job(): + """ + Delete a replication job. + :return: Reference to the spec object + """ + try: + spec.delete(dx_session_obj.server_session, + find_obj_by_name(dx_session_obj.server_session, spec, + arguments['--delete']).reference) + if dx_session_obj.server_session.last_job: + dx_session_obj.jobs[dx_session_obj.server_session.address] = \ + dx_session_obj.server_session.last_job + print_info('Successfully deleted {}.\n'.format(arguments['--delete'])) + + except (HttpError, RequestError, DlpxException) as e: + print_exception('Was not able to delete {}:\n{}'.format( + arguments['--delete'], e)) + + +def list_replication_jobs(): + """ + List the replication jobs on a given engine + """ + obj_names_lst = [] + + for rep_job in spec.get_all(dx_session_obj.server_session): + for obj_spec_ref in rep_job.object_specification.objects: + obj_names_lst.append(database.get(dx_session_obj.server_session, + obj_spec_ref).name) + + print('Name: {}\nReplicated Objects: {}\nEnabled: {}\nEncrypted: {}\n' + 'Reference: {}\nSchedule: {}\nTarget Host: {}\n\n'.format( + rep_job.name, ', '.join(obj_names_lst), rep_job.enabled, + rep_job.encrypted, rep_job.reference, rep_job.schedule, + rep_job.target_host)) + + +def execute_replication_job(obj_name): + """ + Execute a replication job immediately. + :param obj_name: name of object to execute. + """ + try: + spec.execute(dx_session_obj.server_session, + find_obj_by_name(dx_session_obj.server_session, + spec, obj_name).reference) + if dx_session_obj.server_session.last_job: + dx_session_obj.jobs[dx_session_obj.server_session.address] = \ + dx_session_obj.server_session.last_job + print_info('Successfully executed {}.\n'.format(obj_name)) + except (HttpError, RequestError, DlpxException, JobError) as e: + print_exception('Could not execute job {}:\n{}'.format(obj_name, e)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. 
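# create_replication_job() above builds a ReplicationSpec before handing it to
# spec.create(). A minimal sketch of just the spec construction, assuming
# delphixpy 1.8-style value objects; build_rep_spec() and its arguments are
# illustrative only, and obj_refs stands in for the references returned by
# find_obj_specs().
from delphixpy.web.vo import ReplicationSpec, ReplicationList

def build_rep_spec(name, target_host, user, password, obj_refs,
                   schedule='0 0 0 */5 * ?'):
    rep_spec = ReplicationSpec()
    rep_spec.name = name
    rep_spec.target_host = target_host
    rep_spec.target_principal = user
    rep_spec.target_credential = {'type': 'PasswordCredential',
                                  'password': password}
    rep_spec.object_specification = ReplicationList()
    rep_spec.object_specification.objects = obj_refs
    rep_spec.schedule = schedule  # cron-style string, as used by --schedule
    rep_spec.encrypted = True
    return rep_spec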
+ This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dx_session_obj.job_mode(single_thread): + while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo) > 0: + if arguments['--rep_name']: + create_replication_job() + elif arguments['--delete']: + delete_replication_job() + elif arguments['--list']: + list_replication_jobs() + elif arguments['--execute']: + execute_replication_job(arguments['--execute']) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dx_session_obj.jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, + dx_session_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Replication operations: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the + # running jobs list. + del dx_session_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dx_session_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + except (HttpError, RequestError, JobError, DlpxException) as e: + print_exception('ERROR: Could not complete replication' + ' operation:{}'.format(e)) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n{}'.format(e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. 
Exiting.\n'.format( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + return round((time() - time_start)/60, +1) + + +def main(arguments): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job() + + elapsed_minutes = time_elapsed() + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message:\n{}'.format(e)) + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far\n{}'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! 
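# run_job() above starts one main_workflow() thread per engine and then
# join()s them all, which is what lets --all operate on several engines in
# parallel. A stdlib-only sketch of that fan-out/join pattern; the worker and
# engine names are placeholders.
from threading import Thread

def fan_out(engines, worker):
    threads = [Thread(target=worker, args=(engine,)) for engine in engines]
    for thread in threads:
        thread.start()
    for thread in threads:
        # Wait for every engine's workflow to finish before moving on.
        thread.join()

# Example: fan_out(['engine-a', 'engine-b'], some_workflow_function)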
+ main(arguments) diff --git a/v1_8_2/dx_rewind_vdb.py b/v1_8_2/dx_rewind_vdb.py new file mode 100755 index 0000000..9741f4c --- /dev/null +++ b/v1_8_2/dx_rewind_vdb.py @@ -0,0 +1,386 @@ +#!/usr/bin/env python +#Corey Brune - Sep 2016 +#This script performs a rewind of a vdb +#requirements +#pip install --upgrade setuptools pip docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. + +"""Rewinds a vdb +Usage: + dx_rewind_vdb.py (--vdb [--timestamp_type ] [--timestamp ]) + [--bookmark ] + [ --engine --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_rewind_vdb.py -h | --help | -v | --version + +Rewinds a Delphix VDB +Examples: + Rollback to latest snapshot using defaults: + dx_rewind_vdb.py --vdb testVdbUF + Rollback using a specific timestamp: + dx_rewind_vdb.py --vdb testVdbUF --timestamp_type snapshot --timestamp 2016-11-15T11:30:17.857Z + + +Options: + --vdb Name of VDB to rewind + --type Type of database: oracle, mssql, ase, vfiles + --timestamp_type The type of timestamp being used for the reqwind. + Acceptable Values: TIME, SNAPSHOT + [default: SNAPSHOT] + --all Run against all engines. + --timestamp + The Delphix semantic for the point in time on + the source to rewind the VDB. + Formats: + latest point in time or snapshot: LATEST + point in time: "YYYY-MM-DD HH24:MI:SS" + snapshot name: "@YYYY-MM-DDTHH24:MI:SS.ZZZ" + snapshot time from GUI: "YYYY-MM-DD HH24:MI" + [default: LATEST] + --engine Alt Identifier of Delphix engine in dxtools.conf. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_rewind_vdb.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = "v.0.2.015" + + +from docopt import docopt +from os.path import basename +import sys +from time import time, sleep +import traceback + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import database +from delphixpy.web import job +from delphixpy.web.vo import RollbackParameters +from delphixpy.web.vo import OracleRollbackParameters + +from lib.DlpxException import DlpxException +from lib.DxTimeflow import DxTimeflow +from lib.GetReferences import find_obj_by_name +from lib.GetSession import GetSession +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug +from lib.DxLogging import print_exception + + +def rewind_database(dlpx_obj, vdb_name, timestamp, timestamp_type='SNAPSHOT'): + """ + This function performs the rewind (rollback) + + dlpx_obj: Virtualization Engine session object + vdb_name: VDB to be rewound + timestamp: Point in time to rewind the VDB + timestamp_type: The type of timestamp being used for the rewind + """ + + engine_name = dlpx_obj.dlpx_engines.keys()[0] + dx_timeflow_obj = DxTimeflow(dlpx_obj.server_session) + container_obj = find_obj_by_name(dlpx_obj.server_session, database, + vdb_name) + # Sanity check to make sure our container object has a reference + if container_obj.reference: + try: + if container_obj.virtual is not True: + raise DlpxException('{} in engine {} is not a virtual object. 
' + 'Skipping.\n'.format(container_obj.name, + engine_name)) + elif container_obj.staging is True: + raise DlpxException('{} in engine {} is a virtual object. ' + 'Skipping.\n'.format(container_obj.name, + engine_name)) + elif container_obj.runtime.enabled == "ENABLED": + print_info('\nINFO: {} Rewinding {} to {}\n'.format( + engine_name, container_obj.name, timestamp)) + + # This exception is raised if rewinding a vFiles VDB + # since AppDataContainer does not have virtual, staging or + # enabled attributes. + except AttributeError: + pass + + print_debug('{}: Type: {}'.format(engine_name, container_obj.type)) + + # If the vdb is a Oracle type, we need to use a OracleRollbackParameters + if str(container_obj.reference).startswith("ORACLE"): + rewind_params = OracleRollbackParameters() + else: + rewind_params = RollbackParameters() + rewind_params.timeflow_point_parameters = \ + dx_timeflow_obj.set_timeflow_point(container_obj, timestamp_type, + timestamp) + print_debug('{}: {}'.format(engine_name, str(rewind_params))) + try: + # Rewind the VDB + database.rollback(dlpx_obj.server_session, container_obj.reference, + rewind_params) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('VDB {} was rolled back.'.format(container_obj.name)) + except (RequestError, HttpError, JobError) as e: + print_exception('ERROR: {} encountered an error on {}' + ' during the rewind process:\n{}'.format( + engine_name, container_obj.name, e)) + # Don't do anything if the database is disabled + else: + print_info('{}: {} is not enabled. Skipping sync.'.format(engine_name, + container_obj.name)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine, dlpx_obj): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. 
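# rewind_database() above chooses OracleRollbackParameters for Oracle
# containers and plain RollbackParameters otherwise, keyed off the "ORACLE"
# prefix of the container reference. A minimal sketch of that selection,
# assuming delphixpy 1.8-style value objects; pick_rollback_params() is a
# hypothetical helper.
from delphixpy.web.vo import RollbackParameters, OracleRollbackParameters

def pick_rollback_params(container_reference, timeflow_point):
    if str(container_reference).startswith('ORACLE'):
        rewind_params = OracleRollbackParameters()
    else:
        rewind_params = RollbackParameters()
    rewind_params.timeflow_point_parameters = timeflow_point
    return rewind_params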
+ The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + """ + + try: + #Setup the connection to the Delphix Engine + dlpx_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + except DlpxException as e: + print_exception('ERROR: Engine {} encountered an error while' + 'rewinding {}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + + thingstodo = ["thingtodo"] + try: + with dlpx_obj.job_mode(single_thread): + while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: + if len(thingstodo) > 0: + rewind_database(dlpx_obj, arguments['--vdb'], + arguments['--timestamp'], + arguments['--timestamp_type']) + thingstodo.pop() + + # get all the jobs, then inspect them + i = 0 + for j in dlpx_obj.jobs.keys(): + job_obj = job.get(dlpx_obj.server_session, + dlpx_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Refresh of {}: {}'.format( + engine['hostname'], arguments['--vdb'], + job_obj.job_state)) + if job_obj.job_state in ['CANCELED', 'COMPLETED', 'FAILED']: + # If the job is in a non-running state, remove it + # from the running jobs list. + del dlpx_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dlpx_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + except (DlpxException, RequestError, JobError, HttpError) as e: + print_exception('Error in dx_rewind_vdb: {}\n{}'.format( + engine['hostname'], e)) + sys.exit(1) + + +def time_elapsed(time_start): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + + time_start: float containing start time of the script. + """ + return round((time() - time_start)/60, +1) + + +def run_job(dlpx_obj, config_file_path): + """ + This function runs the main_workflow aynchronously against all the + servers specified + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param config_file_path: string containing path to configuration file. + :type config_file_path: str + """ + + # Create an empty list to store threads we create. + threads = [] + engine = None + + # If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info('Executing against all Delphix Engines in the dxtools.conf') + try: + # For each server in the dxtools.conf... + for delphix_engine in dlpx_obj.dlpx_engines: + engine = dlpx_obj.dlpx_engines[delphix_engine] + # Create a new thread and add it to the list. + threads.append(main_workflow(engine, dlpx_obj)) + except DlpxException as e: + print_exception('Error encountered in run_job():\n{}'.format(e)) + sys.exit(1) + + elif arguments['--all'] is False: + # Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dlpx_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + except (DlpxException, RequestError, KeyError): + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. 
Please check your value and' + ' try again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + else: + # Else search for a default engine in the dxtools.conf + for delphix_engine in dlpx_obj.dlpx_engines: + if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': + engine = dlpx_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) + break + + if engine is None: + raise DlpxException('\nERROR: No default engine found. Exiting') + + # run the job against the engine + threads.append(main_workflow(engine, dlpx_obj)) + + # For each thread in the list... + for each in threads: + # join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def main(): + # We want to be able to call on these variables anywhere in the script. + global single_thread + global debug + + time_start = time() + single_thread = False + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + config_file_path = arguments['--config'] + # Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + # This is the function that will handle processing main_workflow for + # all the servers. + run_job(dx_session_obj, config_file_path) + + elapsed_minutes = time_elapsed(time_start) + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + # Here we handle what we do when the unexpected happens + except SystemExit as e: + # This is what we use to handle our sys.exit(#) + sys.exit(e) + + except DlpxException as e: + # We use this exception handler when an error occurs in a function call. + print_exception('ERROR: Please check the ERROR message below:\n' + '{}'.format(e.message)) + sys.exit(2) + + except HttpError as e: + # We use this exception handler when our connection to Delphix fails + print_exception('ERROR: Connection failed to the Delphix Engine. Please' + 'check the ERROR message below:\n{}'.format(e.message)) + sys.exit(2) + + except JobError as e: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) + elapsed_minutes = time_elapsed(time_start) + print_exception('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(3) + + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + print_debug('You sent a CTRL+C to interrupt the process') + elapsed_minutes = time_elapsed(time_start) + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + except: + # Everything else gets caught here + print_exception('{}\n{}'.format(sys.exc_info()[0], + traceback.format_exc())) + elapsed_minutes = time_elapsed(time_start) + print_info("{} took {:.2f} minutes to get this far".format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + # Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + # Feed our arguments to the main function, and off we go! 
+ main() diff --git a/v1_8_2/dx_skel.py b/v1_8_2/dx_skel.py new file mode 100755 index 0000000..1ea6bce --- /dev/null +++ b/v1_8_2/dx_skel.py @@ -0,0 +1,315 @@ +#!/usr/bin/env python +# Corey Brune - Feb 2017 +#Description: +# This is a skeleton script which has all of the common functionality. +# The developer will only need to add the necessary arguments and functions +# then make the function calls in main_workflow(). +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""Description +Usage: + dx_skel.py () + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_skel.py -h | --help | -v | --version +Description + +Examples: + + +Options: + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_skel.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.0.000' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import job + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetSession import GetSession + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. 
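# Every script in this patch feeds its module docstring to docopt(), so the
# Usage/Options block doubles as the argument parser. A tiny standalone sketch
# of that pattern, assuming the docopt package is installed; the demo usage
# text below is made up and is not one of the scripts' real interfaces.
from docopt import docopt

DEMO_DOC = """Usage:
    demo.py --engine <name> [--poll <n>]

Options:
  --engine <name>  Identifier of a Delphix engine in dxtools.conf.
  --poll <n>       Seconds to wait between job polls [default: 10].
"""

arguments = docopt(DEMO_DOC, argv=['--engine', 'myengine'])
# -> {'--engine': 'myengine', '--poll': '10'}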
+ This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + if arguments['--vdb']: + #Get the database reference we are copying from the database name + database_obj = find_obj_by_name(dx_session_obj.server_session, + database, arguments['--vdb']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dx_session_obj.job_mode(single_thread): + while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo) > 0: + if OPERATION: + method_call + + elif OPERATION: + method_call + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dx_session_obj.jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, + dx_session_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Replication operations: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the + # running jobs list. + del dx_session_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dx_session_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + except (HttpError, RequestError, JobError, DlpxException) as e: + print_exception('ERROR: Could not complete replication ' + 'operation:{}'.format(e)) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n{}'.format(e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + (arguments['--engine']))) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. 
Exiting.\n'.format( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + #elapsed_minutes = round((time() - time_start)/60, +1) + #return elapsed_minutes + return round((time() - time_start)/60, +1) + + +def main(arguments): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. 
+ run_job() + + elapsed_minutes = time_elapsed() + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except DlpxException as e: + print_exception('script encountered an error while processing the' + 'config file:\n{}'.format(e)) + + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message:\n{}'.format(e)) + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far\n{}'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_snapshot_db.py b/v1_8_2/dx_snapshot_db.py new file mode 100755 index 0000000..5945f48 --- /dev/null +++ b/v1_8_2/dx_snapshot_db.py @@ -0,0 +1,619 @@ +#!/usr/bin/env python +#Adam Bowen - Apr 2016 +#This script snapshots a vdb or dSource +#Corey Brune - March 2017 +# Updated to allow backup of Sybase +#requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. This thing is brilliant. +"""Snapshot dSources and VDB's + +Usage: + dx_snapshot_db.py (--group [--name ] | --all_dbs ) + [--engine | --all] + [--usebackup] [--bck_file ] [--debug] [--parallel ] + [--poll ][--create_bckup] + [--config ] [--logdir ] + dx_snapshot_db.py (--host [--group ] [--object_type ] + | --object_type [--group ] [--host ] ) + [-d | --engine | --all] + [--usebackup] [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_snapshot_db.py -h | --help | -v | --version + +Snapshot a Delphix dSource or VDB + +Examples: + dx_snapshot_db.py --group "Sources" --object_type dsource --usebackup + dx_snapshot_db.py --name "Employee Oracle 11G DB" + dx_snapshot_db.py --host LINUXSOURCE --parallel 2 --usebackup + dx_snapshot_db.py --name dbw2 --usebackup --group Sources --create_bckup + dx_snapshot_db.py --name dbw2 --usebackup --group Sources --bck_file dbw2_full_20170317_001.dmp + dx_snapshot_db.py --host LINUXSOURCE --parallel 4 --usebackup --debug -d landsharkengine + + + +Options: + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --all_dbs Run against all database objects + --bck_file Name of the specific ASE Sybase backup file(s). 
+ --name Name of object in Delphix to execute against. + --group Name of group in Delphix to execute against. + --host Name of environment in Delphix to execute against. + --object_type dsource or vdb. + --usebackup Snapshot using "Most Recent backup". + Available for MSSQL and ASE only. + --create_bckup Create and ingest a new Sybase backup + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_snapshot_db.log] + -h --help Show this screen. + -v --version Show version. + +""" + +VERSION="v.0.0.100" + + +from docopt import docopt +import logging +from os.path import basename +import signal +import sys +import time +import traceback +import json + +from multiprocessing import Process +from time import sleep, time + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError, JobError +from delphixpy import job_context +from delphixpy.web import database, environment, group, job, source, user +from delphixpy.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters + +def ase_latest_backup_sync_parameters(): + obj = ASELatestBackupSyncParameters() + +def find_obj_by_name(engine, server, f_class, obj_name): + """ + Function to find objects by name and object class, and return object's reference as a string + You might use this function to find objects like groups. + """ + print_debug(engine["hostname"] + ": Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"") + obj_ref = '' + + all_objs = f_class.get_all(server) + for obj in all_objs: + if obj.name == obj_name: + print_debug(engine["hostname"] + ": Found a match " + str(obj.reference)) + return obj + +def find_all_databases_by_group_name(engine, server, group_name, exclude_js_container=False): + """ + Easy way to quickly find databases by group name + """ + + #First search groups for the name specified and return its reference + group_obj = find_obj_by_name(engine, server, group, group_name) + if group_obj: + databases=database.get_all(server, group=group_obj.reference, no_js_container_data_source=exclude_js_container) + return databases + +def find_database_by_name_and_group_name(engine, server, group_name, database_name): + + databases = find_all_databases_by_group_name(engine, server, group_name) + + for each in databases: + if each.name == database_name: + print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) + return each + print_info("Unable to find \"" + database_name + "\" in " + group_name) + +def find_source_by_database(engine, server, database_obj): + #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. + source_obj = source.get_all(server, database=database_obj.reference) + #We'll just do a little sanity check here to ensure we only have a 1:1 result. + if len(source_obj) == 0: + print_error(engine["hostname"] + ": Did not find a source for " + database_obj.name + ". Exiting") + sys.exit(1) + elif len(source_obj) > 1: + print_error(engine["hostname"] + ": More than one source returned for " + database_obj.name + ". 
Exiting") + print_error(source_obj) + sys.exit(1) + return source_obj + +def get_config(config_file_path): + """ + This function reads in the dxtools.conf file + """ + #First test to see that the file is there and we can open it + try: + config_file = open(config_file_path).read() + except: + print_error("Was unable to open " + config_file_path + ". Please check the path and permissions, then try again.") + sys.exit(1) + #Now parse the file contents as json and turn them into a python dictionary, throw an error if it isn't proper json + try: + config = json.loads(config_file) + except: + print_error("Was unable to read " + config_file_path + " as json. Please check file in a json formatter and try again.") + sys.exit(1) + #Create a dictionary of engines (removing the data node from the dxtools.json, for easier parsing) + delphix_engines = {} + for each in config['data']: + delphix_engines[each['hostname']] = each + print_debug(delphix_engines) + return delphix_engines + +def logging_est(logfile_path): + """ + Establish Logging + """ + global debug + logging.basicConfig(filename=logfile_path,format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + print_info("Welcome to " + basename(__file__) + ", version " + VERSION) + global logger + debug = arguments['--debug'] + logger = logging.getLogger() + if debug == True: + logger.setLevel(10) + print_info("Debug Logging is enabled.") + +def job_mode(server): + """ + This function tells Delphix how to execute jobs, based on the single_thread variable at the beginning of the file + """ + #Synchronously (one at a time) + if single_thread == True: + job_m = job_context.sync(server) + print_debug("These jobs will be executed synchronously") + #Or asynchronously + else: + job_m = job_context.async(server) + print_debug("These jobs will be executed asynchronously") + return job_m + +def job_wait(): + """ + This job stops all work in the thread/process until jobs are completed. 
+ """ + #Grab all the jos on the server (the last 25, be default) + all_jobs = job.get_all(server) + #For each job in the list, check to see if it is running (not ended) + for jobobj in all_jobs: + if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): + print_debug("Waiting for " + jobobj.reference + " (currently: " + jobobj.job_state+ ") to finish running against the container") + #If so, wait + job_context.wait(server,jobobj.reference) + +def on_exit(sig, func=None): + """ + This function helps us end cleanly and with exit codes + """ + print_info("Shutdown Command Received") + print_info("Shutting down " + basename(__file__)) + sys.exit(0) + +def print_debug(print_obj): + """ + Call this function with a log message to prefix the message with DEBUG + """ + try: + if debug == True: + print "DEBUG: " + str(print_obj) + logging.debug(str(print_obj)) + except: + pass + +def print_error(print_obj): + """ + Call this function with a log message to prefix the message with ERROR + """ + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + +def print_info(print_obj): + """ + Call this function with a log message to prefix the message with INFO + """ + print "INFO: " + str(print_obj) + logging.info(str(print_obj)) + +def print_warning(print_obj): + """ + Call this function with a log message to prefix the message with WARNING + """ + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + +def serversess(f_engine_address, f_engine_username, f_engine_password): + """ + Function to setup the session with the Delphix Engine + """ + server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") + return server_session + +def set_exit_handler(func): + """ + This function helps us set the correct exit code + """ + signal.signal(signal.SIGTERM, func) + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + +@run_async +def main_workflow(engine): + """ + This function is where the main workflow resides. + Use the @run_async decorator to run this function asynchronously. 
+ This allows us to run against multiple Delphix Engine simultaneously + """ + + #Pull out the values from the dictionary for this engine + engine_address = engine["ip_address"] + engine_username = engine["username"] + engine_password = engine["password"] + #Establish these variables as empty for use later + databases = [] + environment_obj = None + source_objs = None + jobs = {} + + + #Setup the connection to the Delphix Engine + server = serversess(engine_address, engine_username, engine_password) + + #If an environment/server was specified + if host_name: + print_debug(engine["hostname"] + ": Getting environment for " + host_name) + #Get the environment object by the hostname + environment_obj = find_obj_by_name(engine, server, environment, host_name) + if environment_obj != None: + #Get all the sources running on the server + env_source_objs = source.get_all(server, environment=environment_obj.reference) + #If the server doesn't have any objects, exit. + if env_source_objs == None: + print_error(host_name + "does not have any objects. Exiting") + sys.exit(1) + #If we are only filtering by the server, then put those objects in the main list for processing + if not(arguments['--group'] and database_name): + source_objs = env_source_objs + all_dbs = database.get_all(server, no_js_container_data_source=False) + databases = [] + for source_obj in source_objs: + if source_obj.staging == False and source_obj.virtual == True: + database_obj = database.get(server, source_obj.container) + if database_obj in all_dbs: + databases.append(database_obj) + else: + print_error(engine["hostname"] + ":No environment found for " + host_name + ". Exiting") + sys.exit(1) + #If we specified a specific database by name.... + if arguments['--name']: + #Get the database object from the name + database_obj = find_database_by_name_and_group_name(engine, server, arguments['--group'], arguments['--name']) + if database_obj: + databases.append(database_obj) + #Else if we specified a group to filter by.... + elif arguments['--group']: + print_debug(engine["hostname"] + ":Getting databases in group " + arguments['--group']) + #Get all the database objects in a group. + databases = find_all_databases_by_group_name(engine, server, arguments['--group']) + #Else, if we said all vdbs ... + elif arguments['--all_dbs'] and not arguments['--host'] : + #Grab all databases + databases = database.get_all(server, no_js_container_data_source=False) + elif arguments['--object_type'] and not arguments['--host'] : + databases = database.get_all(server) + if not databases or len(databases) == 0: + print_error("No databases found with the criterion specified") + return + #reset the running job count before we begin + i = 0 + with job_mode(server): + #While there are still running jobs or databases still to process.... + while (len(jobs) > 0 or len(databases) > 0): + #While there are databases still to process and we are still under + #the max simultaneous jobs threshold (if specified) + while len(databases) > 0 and (arguments['--parallel'] == None or i < int(arguments['--parallel'])): + #Give us the next database in the list, and remove it from the list + database_obj = databases.pop() + #Get the source of the database. + #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. 
+ source_obj = find_source_by_database(engine, server, database_obj) + #If we applied the environment/server filter AND group filter, find the intersecting matches + if environment_obj != None and (arguments['--group']): + match = False + for env_source_obj in env_source_objs: + if source_obj[0].reference in env_source_obj.reference: + match = True + break + if match == False: + print_error(engine["hostname"] + ": " + database_obj.name + " does not exist on " + host_name + ". Exiting") + return + #Snapshot the database + snapshot_job = snapshot_database(engine, server, jobs, source_obj[0], database_obj, arguments['--object_type']) + #If snapshot_job has any value, then we know that a job was initiated. + if snapshot_job: + #increment the running job count + i += 1 + #Check to see if we are running at max parallel processes, and report if so. + if ( arguments['--parallel'] != None and i >= int(arguments['--parallel'])): + print_info(engine["hostname"] + ": Max jobs reached (" + str(i) + ")") + #reset the running jobs counter, as we are about to update the count from the jobs report. + i = update_jobs_dictionary(engine, server, jobs) + print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + str(len(databases)) + " jobs waiting to run") + #If we have running jobs, pause before repeating the checks. + if len(jobs) > 0: + sleep(float(arguments['--poll'])) + +def run_job(engine): + """ + This function runs the main_workflow aynchronously against all the servers specified + """ + #Create an empty list to store threads we create. + threads = [] + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + #For each server in the dxtools.conf... + for delphix_engine in dxtools_objects: + engine = dxtools_objects[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + else: + #Else if the --engine argument was given, test to see if the engine exists in dxtools.conf + if arguments['--engine']: + try: + engine = dxtools_objects[arguments['--engine']] + print_info("Executing against Delphix Engine: " + arguments['--engine']) + except: + print_error("Delphix Engine \"" + arguments['--engine'] + "\" cannot be found in " + config_file_path) + print_error("Please check your value and try again. Exiting") + sys.exit(1) + #Else if the -d argument was given, test to see if the engine exists in dxtools.conf + elif arguments['-d']: + try: + engine = dxtools_objects[arguments['-d']] + print_info("Executing against Delphix Engine: " + arguments['-d']) + except: + print_error("Delphix Engine \"" + arguments['-d'] + "\" cannot be found in " + config_file_path) + print_error("Please check your value and try again. Exiting") + sys.exit(1) + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dxtools_objects: + if dxtools_objects[delphix_engine]['default'] == 'true': + engine = dxtools_objects[delphix_engine] + print_info("Executing against the default Delphix Engine in the dxtools.conf: " + dxtools_objects[delphix_engine]['hostname']) + break + if engine == None: + print_error("No default engine found. Exiting") + sys.exit(1) + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... 
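# main_workflow() above keeps polling until every queued snapshot job reaches
# a terminal state, sleeping --poll seconds between passes, with
# update_jobs_dictionary() pruning finished jobs. A stdlib-only sketch of that
# poll-and-prune loop; get_state stands in for job.get(...).job_state.
from time import sleep

def poll_until_done(jobs, get_state, poll_seconds=10):
    """jobs: dict of {name: job_ref}; get_state: callable returning a state."""
    while jobs:
        for name in list(jobs.keys()):
            if get_state(jobs[name]) in ('CANCELED', 'COMPLETED', 'FAILED'):
                # Terminal state: drop the job from the running-jobs dict.
                del jobs[name]
        if jobs:
            sleep(poll_seconds)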
+ for each in threads: + #join them back together so that we wait for all threads to complete before moving on + each.join() + +def snapshot_database(engine, server, jobs, source_obj, container_obj, obj_type=None): + """ + This function + FYI - Snapshot is also called sync + """ + #Sanity check to make sure our source object has a reference + if source_obj.reference != None : + #If we specified the --object_type flag, ensure this source is a match. Skip, if not. + if obj_type != None and ((obj_type.lower() == "vdb" and source_obj.virtual != True ) or (obj_type.lower() == "dsource" and source_obj.virtual != False )): + print_warning(engine["hostname"] + ": " + container_obj.name + " is not a " + obj_type.lower() + ". Skipping sync") + #Ensure this source is not a staging database. We can't act upon those. + elif source_obj.staging == True: + print_warning(engine["hostname"] + ": " + container_obj.name + " is a staging database. Skipping.") + #Ensure the source is enabled. We can't snapshot disabled databases. + elif source_obj.runtime.enabled == "ENABLED" : + print_info(engine["hostname"] + ": Syncing " + container_obj.name ) + print_debug(engine["hostname"] + ": Type: " + source_obj.type ) + print_debug(engine["hostname"] + ": " +source_obj.type) + #If the database is a dSource and a MSSQL type, we need to tell Delphix how we want to sync the database... + #Delphix will just ignore the extra parameters if it is a VDB, so we will omit any extra code to check + if (source_obj.type == "MSSqlLinkedSource"): + sync_params = MSSqlSyncParameters() + #From last backup? + if usebackup == True: + sync_params.load_from_backup = True + print_info(engine["hostname"] + ": MSSQL database. Creating snapshot of " + container_obj.name + " from Latest Full backup.") + #Or take a new backup? + else: + sync_params.load_from_backup = False + print_info(engine["hostname"] + ": MSSQL database. Creating snapshot of " + container_obj.name + " from New Full backup.") + print_debug(engine["hostname"] + ": " +str(sync_params)) + #Sync it + database.sync(server, container_obj.reference, sync_params) + #Else if the database is a dSource and a ASE type, we need also to tell Delphix how we want to sync the database... + #Delphix will just ignore the extra parameters if it is a VDB, so we will omit any extra code to check + elif (source_obj.type == "ASELinkedSource"): + if usebackup == True: + if arguments['--bck_file']: + sync_params = ASESpecificBackupSyncParameters() + sync_params.backup_files = (arguments['--bck_file']).split(' ') + elif arguments['--create_bckup']: + sync_params = ASENewBackupSyncParameters() + else: + sync_params = ASELatestBackupSyncParameters() + print_info(engine["hostname"] + ": ASE database. Creating snapshot of " + container_obj.name + " from Latest Full backup.") + #Or take a new backup? + else: + sync_params = ASENewBackupSyncParameters() + print_info(engine["hostname"] + ": ASE database. 
Creating snapshot of " + container_obj.name + " from Full backup.") + print_debug(engine["hostname"] + ": " +str(sync_params)) + #Sync it + database.sync(server, container_obj.reference, sync_params) + #If it isn't MSSQL or ASE, Delphix can just go ahead and sync the database + else: + #Sync it + database.sync(server, container_obj.reference) + #Add the job into the jobs dictionary so we can track its progress + jobs[container_obj] = server.last_job + #return the job object to the calling statement so that we can tell if a job was created or not (will return None, if no job) + return server.last_job + #Don't do anything if the database is disabled + else: + print_warning(engine["hostname"] + ": " + container_obj.name + " is not enabled. Skipping sync") + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + +def update_jobs_dictionary(engine, server, jobs): + """ + This function checks each job in the dictionary and updates its status or removes it if the job is complete. + Return the number of jobs still running. + """ + #Establish the running jobs counter, as we are about to update the count from the jobs report. + i = 0 + #get all the jobs, then inspect them + for j in jobs.keys(): + job_obj = job.get(server, jobs[j]) + print_debug(engine["hostname"] + ": " + str(job_obj)) + print_info(engine["hostname"] + ": " + j.name + ": " + job_obj.job_state) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the running jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running job count. + i += 1 + return i + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global host_name + global database_name + global config_file_path + global dxtools_objects + + + + try: + #Declare globals that will be used throughout the script. + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + usebackup = arguments['--usebackup'] + database_name = arguments['--name'] + host_name = arguments['--host'] + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dxtools_objects = get_config(config_file_path) + + #This is the function that will handle processing main_workflow for all the servers. 
+ run_job(engine) + + elapsed_minutes = time_elapsed() + print_info("script took " + str(elapsed_minutes) + " minutes to get this far.") + + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that we have actionable data + """ + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(3) + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + except: + """ + Everything else gets caught here + """ + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #I added this below condition to account for my --name | or AT LEAST ONE OF --group --host --object_type + #I couldn't quite sort it out with docopt. Maybe I'm just dense today. + #Anyway, if none of the four options are given, print the __doc__ and exit. + if not(arguments['--name']) and not(arguments['--group']) and not(arguments['--host']) and not(arguments['--object_type']) and not(arguments['--all_dbs']): + print(__doc__) + sys.exit() + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_update_env.py b/v1_8_2/dx_update_env.py new file mode 100755 index 0000000..a343079 --- /dev/null +++ b/v1_8_2/dx_update_env.py @@ -0,0 +1,326 @@ +#!/usr/bin/env python +# Corey Brune - Feb 2017 +#Description: +# Update Environment +# +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""Description +Usage: + dx_update_env.py (--pw --env_name ) + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_update_env.py -h | --help | -v | --version +Description + +Examples: + + +Options: + --pw Password + --env_name Name of the environment + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION = 'v.0.0.001' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import job +from delphixpy.web import environment +from delphixpy.web.vo import ASEHostEnvironmentParameters +from delphixpy.web.vo import UnixHostEnvironment + + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetSession import GetSession + +def update_ase_db_pw(): + + env_obj = UnixHostEnvironment() + env_obj.ase_host_environment_parameters = ASEHostEnvironmentParameters() + env_obj.ase_host_environment_parameters.credentials = {'type': + 'PasswordCredential', + 'password': arguments['--pw']} + + try: + environment.update(dx_session_obj.server_session, find_obj_by_name( + dx_session_obj.server_session, environment, + arguments['--env_name'], env_obj).reference, env_obj) + + except (HttpError, RequestError) as e: + print_exception('Could not update ASE DB Password:\n{}'.format(e)) + sys.exit(1) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + jobs = {} + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine %s encountered an error while' + '%s:\n%s\n' % (engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + #reset the running job count before we begin + i = 0 + with dx_session_obj.job_mode(single_thread): + while (len(jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo)> 0: + if arguments['--pw']: + update_ase_db_pw() + + #elif OPERATION: + # method_call + + thingstodo.pop() + + #get all the jobs, then inspect them + i = 0 + for j in jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, jobs[j]) + print_debug(job_obj) + print_info(engine["hostname"] + ": VDB Operations: " + + job_obj.job_state) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the + # running jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running + # job count. + i += 1 + + print_info(engine["hostname"] + ": " + str(i) + " jobs running. ") + #If we have running jobs, pause before repeating the checks. 
+ if len(jobs) > 0: + sleep(float(arguments['--poll'])) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n%s' % (e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: %s\n' % + (arguments['--engine'])) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine %s cannot be ' + 'found in %s. Please check your value ' + 'and try again. Exiting.\n%s\n' % ( + arguments['--engine'], config_file_path, e)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: %s' % ( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + + +def main(argv): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global database_name + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. 
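+ #For reference, each engine entry parsed out of dxtools.conf supplies the
+ #fields main_workflow() consumes (hostname, ip_address, username, password);
+ #see the sample entry shipped in v1_8_2/dxtools.conf, e.g.:
+ # {"hostname": "landsharkengine", "ip_address": "172.16.169.146",
+ #  "username": "delphix_admin", "password": "delphix",
+ #  "port": "80", "default": "true", "encrypted": "false"}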
+ run_job() + + elapsed_minutes = time_elapsed() + print_info("script took " + str(elapsed_minutes) + + " minutes to get this far.") + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message below') + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('%s took %s minutes to get this far\n' % + (basename(__file__), str(elapsed_minutes))) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dx_users.py b/v1_8_2/dx_users.py new file mode 100755 index 0000000..e7e08cd --- /dev/null +++ b/v1_8_2/dx_users.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python +# Adam Bowen - Aug 2017 +#Description: +# This script will allow you to easily manage users in Delphix +# This script currently only supports Native authentication +# +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""Description +Usage: + dx_users.py (--user_name [(--add --password --email [--jsonly]) |--delete]) + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_users.py --update --user_name [ --password ] [--email ] [ --delete ] [--jsonly] + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_users.py (--list) + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + dx_users.py -h | --help | -v | --version +Description + +Examples: + dx_users.py --add --user_name dev --password delphix --email "test@something.com" --jsonly + dx_users.py --debug --config delphixpy-examples/dxtools_1.conf --update --user_name dev --password not_delphix --email "test@somethingelse.com" + dx_users.py --delete --user_name dev + dx_users.py --list + +Options: + --user_name The name of the user + --password The password of the user to be created/updated + --email The email addres of the user to be created/updated + --jsonly Designate the user as a Jet Stream Only User + --add Add the identified user + --update Update the identified user + --delete Delete the identified user + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. 
+ --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_skel.log] + -h --help Show this screen. + -v --version Show version. +""" + +VERSION = 'v.0.0.003' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import authorization +from delphixpy.web import job +from delphixpy.web import user +from delphixpy.web import role +from delphixpy.web.vo import Authorization +from delphixpy.web.vo import User +from delphixpy.web.vo import PasswordCredential +from delphixpy.web.vo import CredentialUpdateParameters + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_all_objects +from lib.GetSession import GetSession + +def add_user(user_name, user_password, user_email, jsonly=None): + """ + This function adds the user + """ + user_obj = User() + user_obj.name = user_name + user_obj.email_address = user_email + user_obj.credential = PasswordCredential() + user_obj.credential.password = user_password + + try: + user.create(dx_session_obj.server_session,user_obj) + print('Attempting to create {}'.format(user_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Creating the user {} ' + 'encountered an error:\n{}'.format(user_name, e)) + sys.exit(1) + + js_only(user_name, jsonly) + +def js_only(user_name, jsonly=None): + """ + Switch the user to/from a jsonly user + """ + user_obj = find_obj_by_name(dx_session_obj.server_session, + user, user_name) + role_obj = find_obj_by_name(dx_session_obj.server_session, + role, "Jet Stream User") + + if jsonly: + authorization_obj = Authorization() + authorization_obj.role = role_obj.reference + authorization_obj.target = user_obj.reference + authorization_obj.user = user_obj.reference + + authorization.create(dx_session_obj.server_session, authorization_obj) + else: + + auth_name = "(" + user_obj.reference + ", " + role_obj.reference + ", " + user_obj.reference + ")" + authorization.delete(dx_session_obj.server_session,find_obj_by_name(dx_session_obj.server_session, + authorization, auth_name).reference) + +def update_user(user_name, user_password=None, user_email=None, jsonly=None): + """ + This function updates the user + """ + + if user_email: + updated_user_obj = User() + updated_user_obj.email_address = user_email + + try: + user.update(dx_session_obj.server_session,find_obj_by_name(dx_session_obj.server_session, + user, user_name).reference,updated_user_obj) + print('Attempting to update {}'.format(user_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Updating the user {} ' + 'encountered an error:\n{}'.format(user_name, e)) + sys.exit(1) + + if user_password: + new_password_obj = CredentialUpdateParameters() + new_password_obj.new_credential = PasswordCredential() + new_password_obj.new_credential.password = user_password + + try: + user.update_credential(dx_session_obj.server_session,find_obj_by_name(dx_session_obj.server_session, + user, 
user_name).reference,new_password_obj) + print('Attempting to update {} password'.format(user_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Updating the user {} password ' + 'encountered an error:\n{}'.format(user_name, e)) + sys.exit(1) + + js_only(user_name, jsonly) + +def delete_user(user_name): + """ + This function adds the user + """ + user_obj = find_obj_by_name(dx_session_obj.server_session, + user, user_name) + + + try: + user.delete(dx_session_obj.server_session,user_obj.reference) + print('Attempting to delete {}'.format(user_name)) + except (DlpxException, RequestError) as e: + print_exception('\nERROR: Deleting the user {} ' + 'encountered an error:\n{}'.format(user_name, e)) + sys.exit(1) + +def list_users(): + """ + This function lists all users + """ + user_list = find_all_objects(dx_session_obj.server_session, user) + + for user_obj in user_list: + print('User: {}'.format(user_obj.name)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. + This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dx_session_obj.job_mode(single_thread): + while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo) > 0: + if arguments['--add'] : + add_user(arguments['--user_name'], arguments['--password'], arguments['--email'], arguments['--jsonly']) + elif arguments['--update'] : + update_user(arguments['--user_name'], arguments['--password'], arguments['--email'], arguments['--jsonly']) + elif arguments['--delete']: + delete_user(arguments['--user_name']) + elif arguments['--list']: + list_users() + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dx_session_obj.jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, + dx_session_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: User: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the + # running jobs list. + del dx_session_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. 
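+ # In short: terminal states (CANCELED/COMPLETED/FAILED) drop the job from
+ # dx_session_obj.jobs, RUNNING keeps the counter above zero, and the loop
+ # below sleeps for --poll seconds before checking again.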
+ if len(dx_session_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + except (HttpError, RequestError, JobError, DlpxException) as e: + print_exception('ERROR: Could not complete user ' + 'operation: {}'.format(e)) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print 'Error encountered in run_job():\n{}'.format(e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + (arguments['--engine']))) + + except (DlpxException, RequestError, KeyError) as e: + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + + break + + if engine == None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + #elapsed_minutes = round((time() - time_start)/60, +1) + #return elapsed_minutes + return round((time() - time_start)/60, +1) + + +def main(arguments): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. 
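+ #Illustrative only (assumes dx_session_obj is already connected): the same
+ #operations the CLI flags trigger can also be driven from Python, e.g.
+ # add_user('dev', 'delphix', 'test@something.com', jsonly=True)
+ # update_user('dev', user_password='not_delphix')
+ # delete_user('dev')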
+ run_job() + + elapsed_minutes = time_elapsed() + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except DlpxException as e: + print_exception('script encountered an error while processing the' + 'config file:\n{}'.format(e)) + + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message:\n{}'.format(e)) + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far\n{}'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! + main(arguments) diff --git a/v1_8_2/dxtools.conf b/v1_8_2/dxtools.conf new file mode 100755 index 0000000..68e331d --- /dev/null +++ b/v1_8_2/dxtools.conf @@ -0,0 +1,13 @@ +{ + "data":[ + { + "hostname":"landsharkengine", + "ip_address":"172.16.169.146", + "username":"delphix_admin", + "password":"delphix", + "port":"80", + "default":"true", + "encrypted":"false" + } + ] +} diff --git a/v1_8_2/engine_network_assignment.py b/v1_8_2/engine_network_assignment.py new file mode 100755 index 0000000..f546f2f --- /dev/null +++ b/v1_8_2/engine_network_assignment.py @@ -0,0 +1,249 @@ +#!/usr/bin/env python +''' +Adam Bowen - Jan 2016 +This script configures the Delphix Engine networking. +''' +VERSION="v.2.3.001" + +import getopt +import logging +from os.path import basename +import signal +import sys +import time +import traceback + +import errno +from socket import error as socket_error + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError,JobError +from delphixpy.web import network, system, user, service +from delphixpy.web.vo import PasswordCredential, User, NetworkInterface, \ + InterfaceAddress, DNSConfig, SystemInfo, NetworkRoute + +def system_serversess(f_engine_address, f_engine_username, f_engine_password): + ''' + Function to grab the server session + ''' + server_session= DelphixEngine(f_engine_address, f_engine_username, \ + f_engine_password, "SYSTEM") + return server_session + +def help(): + print( basename(__file__)+ ' [-e ] [-o - The IP to use to connect to the Delphix ' + 'Engine. 
\nEngine must be up, unconfigured, and console screen must be ' + 'green') + print('-p - will set the sysadmin user to this ' + 'password') + print('-n - will set the Delphix Engine to this ' + 'IP address \n(i.e. 10.0.1.10/24)') + print('-g - will set the default gateway to point to ' + 'this \nIP address') + print('-d - comma delimited string of dns servers to use \n' + '(i.e. \"4.2.2.2,192.168.2.1\"")') + print("-v - Print version information and exit") + sys.exit(2) + +def logging_est(): + ''' + Establish Logging + ''' + global debug + logging.basicConfig(filename='landshark_setup.log',\ + format='%(levelname)s:\%(asctime)s:%(message)s', level=logging.INFO, \ + datefmt='%Y-%m-%d %H:%M:%S') + print_info("Welcome to " + basename(__file__) + ", version " + VERSION) + global logger + debug = True + logger = logging.getLogger() + logger.setLevel(10) + print_info("Debug Logging is enabled.") + +def on_exit(sig, func=None): + print_info("Shutdown Command Received") + print_info("Shutting down prime_setup.py") + sys.exit(0) + +def print_debug(print_obj): + ''' + DEBUG Log-level + ''' + if debug == True: + print "DEBUG: " + str(print_obj) + logging.debug(str(print_obj)) + +def print_error(print_obj): + ''' + ERROR Log-level + ''' + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + +def print_info(print_obj): + ''' + INFO Log-level + ''' + print "INFO: " + str(print_obj) + logging.info(str(print_obj)) + +def print_warning(print_obj): + ''' + WARNING Log-level + ''' + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + +def set_exit_handler(func): + signal.signal(signal.SIGTERM, func) + +def time_elapsed(): + elapsed_minutes = round((time.time() - time_start)/60, +1) + return elapsed_minutes + +def version(): + print("Version: " +VERSION) + logging_est() + set_exit_handler(on_exit) + sys.exit(1) + +def main(argv): + try: + logging_est() + global time_start + time_start = time.time() + engine_ip = "" + engine_pass = "" + dg = "" + dns_servers = "" + try: + opts,args = getopt.getopt(argv,"e:n:g:d:p:hv") + except getopt.GetoptError: + help() + for opt, arg in opts: + if opt == '-h': + help() + elif opt == '-e': + engine_ip = arg + elif opt == '-p': + engine_pass = arg + elif opt == '-n': + new_engine_cidr = arg + elif opt == '-g': + dg = arg + elif opt == '-d': + dns_servers = arg + elif opt == '-v': + version() + + if (engine_ip == "" or engine_pass == "" or new_engine_cidr == "" or \ + dg == "" or dns_servers == "") : + help() + + sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) + + #Configure Static IP + primary_interface = network.interface.get_all(sys_server)[0].reference + print_debug("Primary interface identified as " + primary_interface) + ni_obj = NetworkInterface() + if_obj = InterfaceAddress() + if_obj.address = new_engine_cidr + if_obj.address_type = "STATIC" + #if_obj.addressType = "DHCP" + ni_obj.addresses = [if_obj] + #print_debug(str(ni_obj)) + try: + print_debug("Changing the IP address. This operation can take up to 60 seconds to complete") + network.interface.update(sys_server, primary_interface, ni_obj) + except socket_error as e: + if e.errno == errno.ETIMEDOUT: + print_debug("IP address changed") + else: + raise e + #if we made it this far, we need to operate on the new IP. 
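+ #(The ETIMEDOUT tolerated above is expected: once the interface update takes
+ # effect, the engine stops answering on its old address, so the HTTP call
+ # never gets a response. The script then switches to the new address, e.g.
+ # with -n 10.0.1.10/24 the split('/') below yields engine_ip = "10.0.1.10".)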
+ engine_ip = new_engine_cidr.split('/')[0] + print_debug("ENGINE IP: " + engine_ip) + #Now re-establish the server session + sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) + + #configure DNS + print_debug("Setting DNS") + dns_obj = DNSConfig() + dns_obj.servers = dns_servers.split(",") + dns_obj.domain = [] + service.dns.set(sys_server, dns_obj) + + #configure hostname + print_debug("Setting hostname") + system_info = SystemInfo() + system_info.hostname = "LandsharkEngine" + system.set(sys_server,system_info) + + #configue default gateway + print_debug("Setting default gateway") + de_routes = network.route.get_all(sys_server) + print_debug("Current routes: ") + print_debug(str(de_routes)) + default_gateway = NetworkRoute() + default_gateway.destination = "default" + default_gateway.out_interface = primary_interface + #Check to see if a DG already exists. If so, delete it. + for de_route in de_routes: + if de_route.destination == 'default': + print_debug("Found an existing DG. Deleting it") + default_gateway.gateway = dg + network.route.delete(sys_server, default_gateway) + default_gateway.gateway = dg + print_debug("Adding new route") + network.route.add(sys_server, default_gateway) + de_routes = network.route.get_all(sys_server) + print_debug("New routes: ") + print_debug(str(de_routes)) + + + except SystemExit as e: + sys.exit(e) + except HttpError as e: + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + \ + " minutes to get this far.") + sys.exit(2) + except KeyboardInterrupt: + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + \ + " minutes to get this far.") + sys.exit(2) + except socket_error as e: + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + if e.errno == errno.ETIMEDOUT: + print_debug("Connection timed out trying to connect to " \ + + engine_ip) + else: + print_error(e.message) + sys.exit(2) + except: + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + \ + " minutes to get this far.") + sys.exit(2) + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/v1_8_2/engine_setup.py b/v1_8_2/engine_setup.py new file mode 100755 index 0000000..54ff043 --- /dev/null +++ b/v1_8_2/engine_setup.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python +''' +Adam Bowen - Jan 2016 +This script configures the sysadmin user and configures domain0 +Will come back and properly throw this with logging, etc +''' +VERSION="v.2.3.005" +CONTENTDIR="/u02/app/content" + +import getopt +import logging +from os.path import basename +import signal +import sys +import time +import traceback +import untangle + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError,JobError +from delphixpy.web import domain, storage, user +from delphixpy.web.vo import CredentialUpdateParameters, PasswordCredential, DomainCreateParameters, User +from lib.GetSession import GetSession + +def system_serversess(f_engine_address, f_engine_username, f_engine_password): + ''' + Function to grab the server session + ''' + server_session= 
DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "SYSTEM") + return server_session + +def help(): + print("\n" + basename(__file__)+ " [-e ] [-o - Engine must be up, unconfigured, and console screen must be green") + print("-o - will use this password to initially access the system") + print("-p - will set the sysadmin user to this password") + print("-v - Print version information and exit") + sys.exit(2) + +def logging_est(): + ''' + Establish Logging + ''' + global debug + logging.basicConfig(filename='landshark_setup.log',format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + print_info("Welcome to " + basename(__file__) + ", version " + VERSION) + global logger + debug = True + logger = logging.getLogger() + logger.setLevel(10) + print_info("Debug Logging is enabled.") + +def on_exit(sig, func=None): + print_info("Shutdown Command Received") + print_info("Shutting down prime_setup.py") + sys.exit(0) + +def print_debug(print_obj): + ''' + DEBUG Log-level + ''' + if debug == True: + print "DEBUG: " + str(print_obj) + logging.debug(str(print_obj)) + +def print_error(print_obj): + ''' + ERROR Log-level + ''' + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + +def print_info(print_obj): + ''' + INFO Log-level + ''' + print "INFO: " + str(print_obj) + logging.info(str(print_obj)) + +def print_warning(print_obj): + ''' + WARNING Log-level + ''' + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + +def set_exit_handler(func): + signal.signal(signal.SIGTERM, func) + +def time_elapsed(): + elapsed_minutes = round((time.time() - time_start)/60, +1) + return elapsed_minutes + +def version(): + print("Version: " +VERSION) + logging_est() + set_exit_handler(on_exit) + sys.exit(1) + +def main(argv): + try: + logging_est() + global time_start + time_start = time.time() + dx_session_obj = GetSession() + engine_ip = "" + engine_pass = "" + old_engine_pass = "" + try: + opts,args = getopt.getopt(argv,"e:o:p:hv") + except getopt.GetoptError: + help() + for opt, arg in opts: + if opt == '-h': + help() + elif opt == '-e': + engine_ip = arg + elif opt == '-o': + old_engine_pass = arg + elif opt == '-p': + engine_pass = arg + elif opt == '-v': + version() + + if (engine_ip == "" or engine_pass == "" or old_engine_pass == "") : + help() + + dx_session_obj.serversess(engine_ip, 'sysadmin', + old_engine_pass, 'SYSTEM') + + dx_session_obj.server_wait() + + sys_server = system_serversess(engine_ip, "sysadmin", old_engine_pass) + + if user.get(sys_server, "USER-1").email_address == None: + print_info("Setting sysadmin's email address") + sysadmin_user = User() + sysadmin_user.email_address = "spam@delphix.com" + user.update(sys_server, 'USER-1', sysadmin_user) + print_info("Setting sysadmin's password") + sysadmin_credupdate = CredentialUpdateParameters() + sysadmin_credupdate.new_credential = PasswordCredential() + sysadmin_credupdate.new_credential.password = engine_pass + user.update_credential(sys_server, 'USER-1', sysadmin_credupdate) + else: + print_info("sysadmin user has already been configured") + + try: + sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) + domain.get(sys_server) + print_info("domain0 already exists. 
Skipping domain0 creation.") + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(7) + except HttpError as e: + device_list = storage.device.get_all(sys_server) + system_init_params = DomainCreateParameters() + system_init_params.devices = [ device.reference for device in device_list if not device.configured ] + print_info("Creating storage domain") + domain.set(sys_server, system_init_params) + while True: + try: + sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) + domain.get(sys_server) + except: + break + print_info("Waiting for Delphix Engine to go down") + time.sleep(3) + + dx_session_obj.serversess(engine_ip, 'sysadmin', + engine_pass, 'SYSTEM') + + dx_session_obj.server_wait() + + except SystemExit as e: + sys.exit(e) + except HttpError as e: + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(2) + except KeyboardInterrupt: + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(2) + except: + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(2) + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/v1_8_2/find_missing_archivelogs.py b/v1_8_2/find_missing_archivelogs.py new file mode 100755 index 0000000..a3ab7ea --- /dev/null +++ b/v1_8_2/find_missing_archivelogs.py @@ -0,0 +1,339 @@ +#!/usr/bin/env python +# Corey Brune - March 2017 +#Description: +# Adapted from Tad Martin's bash script +# +#Requirements +#pip install docopt delphixpy + +#The below doc follows the POSIX compliant standards and allows us to use +#this doc to also define our arguments for the script. +"""Description +Usage: + find_missing_archivelogs.py --outdir + [--engine | --all] + [--debug] [--parallel ] [--poll ] + [--config ] [--logdir ] + find_missing_archivelogs.py -h | --help | -v | --version +Description + Find missing archive logs for each engine + +Examples: + find_missing_archivelogs.py --outdir /var/tmp + + +Options: + --outdir Directory for the output files + --engine Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./dx_operations_vdb.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION = 'v.0.0.005' + +import sys +from os.path import basename +from time import sleep, time +from docopt import docopt + +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import RequestError +from delphixpy.web import job +from delphixpy.web import source +from delphixpy.web import database +from delphixpy.web.timeflow import oracle + + +from lib.DlpxException import DlpxException +from lib.DxLogging import logging_est +from lib.DxLogging import print_debug +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.GetReferences import find_all_objects +from lib.GetReferences import find_obj_by_name +from lib.GetSession import GetSession + + +def find_missing_archivelogs(hostname): + """ + Function to find missing archive log files for Oracle dSources. + """ + print 'Now working on engine {}.'.format(hostname) + + log_file = open('{}/{}.csv'.format(arguments['--outdir'], hostname), 'a+') + + log_file.write('InstanceNumber,Sequence,StartSCN,EndSCN\n') + src_objs = find_all_objects(dx_session_obj.server_session, source) + + for src_obj in src_objs: + if src_obj.virtual is False and src_obj.type == 'OracleLinkedSource': + ora_logs = oracle.log.get_all(dx_session_obj.server_session, + database=find_obj_by_name( + dx_session_obj.server_session, + database, src_obj.name).reference, + missing=True, page_size=1000) + + if ora_logs: + for log_data in ora_logs: + log_file.write('{}, {}, {}, {}, {}, {}\n'.format( + src_obj.name, log_data.instance_num, + log_data.instance_num, log_data.sequence, + log_data.start_scn, log_data.end_scn)) + elif not ora_logs: + log_file.write('{} has no missing files.\n'.format( + src_obj.name)) + log_file.close() + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + E.g.: + @run_async + def task1(): + do_something + @run_async + def task2(): + do_something_too + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +@run_async +def main_workflow(engine): + """ + This function actually runs the jobs. + Use the @run_async decorator to run this function asynchronously. 
+ This allows us to run against multiple Delphix Engine simultaneously + + engine: Dictionary of engines + """ + jobs = {} + + try: + #Setup the connection to the Delphix Engine + dx_session_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while' + '{}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + #reset the running job count before we begin + i = 0 + with dx_session_obj.job_mode(single_thread): + while (len(jobs) > 0 or len(thingstodo)> 0): + if len(thingstodo)> 0: + + #if OPERATION: + find_missing_archivelogs(engine['hostname']) + + thingstodo.pop() + + #get all the jobs, then inspect them + i = 0 + for j in jobs.keys(): + job_obj = job.get(dx_session_obj.server_session, jobs[j]) + print_debug(job_obj) + print_info('{}: VDB Operations:{}\n'.format(engine['hostname'], + job_obj.job_state)) + + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + #If the job is in a non-running state, remove it from the + # running jobs list. + del jobs[j] + else: + #If the job is in a running state, increment the running + # job count. + i += 1 + + print_info(engine["hostname"] + ": " + str(i) + " jobs running. ") + #If we have running jobs, pause before repeating the checks. + if len(jobs) > 0: + sleep(float(arguments['--poll'])) + + +def run_job(): + """ + This function runs the main_workflow aynchronously against all the servers + specified + """ + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info('Executing against all Delphix Engines in the dxtools.conf') + + try: + #For each server in the dxtools.conf... + for delphix_engine in dx_session_obj.dlpx_engines: + engine = dx_session_obj[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine)) + + except DlpxException as e: + print_exception('Error encountered in run_job():\n{}'.format(e)) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dx_session_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + (arguments['--engine']))) + + except (DlpxException, RequestError, KeyError) as e: + print_exception('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. Exiting.\n{}'.format( + arguments['--engine'], config_file_path, e)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dx_session_obj.dlpx_engines: + if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + engine = dx_session_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) + break + + if engine is None: + print_exception('\nERROR: No default engine found. Exiting\n') + + #run the job against the engine + threads.append(main_workflow(engine)) + + #For each thread in the list... 
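+ #Each thread writes its own report: with the documented invocation
+ #(--outdir /var/tmp) and the sample engine from dxtools.conf, the output
+ #would land in /var/tmp/landsharkengine.csv (one CSV per engine hostname).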
+ for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + elapsed_minutes = round((time() - time_start)/60, +1) + return elapsed_minutes + + +def main(arguments): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global usebackup + global time_start + global config_file_path + global dx_session_obj + global debug + + if arguments['--debug']: + debug = True + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + engine = None + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job() + + elapsed_minutes = time_elapsed() + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + """ + This is what we use to handle our sys.exit(#) + """ + sys.exit(e) + + except HttpError as e: + """ + We use this exception handler when our connection to Delphix fails + """ + print_exception('Connection failed to the Delphix Engine' + 'Please check the ERROR message below') + sys.exit(1) + + except JobError as e: + """ + We use this exception handler when a job fails in Delphix so that + we have actionable data + """ + elapsed_minutes = time_elapsed() + print_exception('A job failed in the Delphix Engine') + print_info('{} took {:.2f} minutes to get this far:\n{}\n'.format( + basename(__file__), elapsed_minutes, e)) + sys.exit(3) + + except KeyboardInterrupt: + """ + We use this exception handler to gracefully handle ctrl+c exits + """ + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + + except: + """ + Everything else gets caught here + """ + print_exception(sys.exc_info()[0]) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far\n'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + #Feed our arguments to the main function, and off we go! 
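+ #docopt returns a plain dict keyed by the option names from the usage text,
+ #e.g. arguments['--outdir'], arguments['--engine'], arguments['--all'],
+ #which is what main() and run_job() read above.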
+ main(arguments) diff --git a/v1_8_2/get_engine_pub_key.py b/v1_8_2/get_engine_pub_key.py new file mode 100755 index 0000000..8dc29be --- /dev/null +++ b/v1_8_2/get_engine_pub_key.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python +''' +Adam Bowen - May 2017 +This script grabs +''' +VERSION="v.2.3.003" + +import getopt +import logging +from os.path import basename +import signal +import sys +import time +import traceback +import untangle + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import HttpError,JobError +from delphixpy.web import system +from lib.GetSession import GetSession + + +def system_serversess(f_engine_address, f_engine_username, f_engine_password): + ''' + Function to grab the server session + ''' + server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "SYSTEM") + return server_session + +def help(): + print("\n" + basename(__file__)+ " [-e ] [-p - Engine must be up and console screen must be green") + print("-p - sysadmin password") + print("-d - directory where key will be saved") + print("-v - Print version information and exit") + sys.exit(2) + +def logging_est(): + ''' + Establish Logging + ''' + global debug + logging.basicConfig(filename='landshark_setup.log',format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + print_info("Welcome to " + basename(__file__) + ", version " + VERSION) + global logger + debug = True + logger = logging.getLogger() + logger.setLevel(10) + print_info("Debug Logging is enabled.") + +def on_exit(sig, func=None): + print_info("Shutdown Command Received") + print_info("Shutting down prime_setup.py") + sys.exit(0) + +def print_debug(print_obj): + ''' + DEBUG Log-level + ''' + if debug == True: + print "DEBUG: " + str(print_obj) + logging.debug(str(print_obj)) + +def print_error(print_obj): + ''' + ERROR Log-level + ''' + print "ERROR: " + str(print_obj) + logging.error(str(print_obj)) + +def print_info(print_obj): + ''' + INFO Log-level + ''' + print "INFO: " + str(print_obj) + logging.info(str(print_obj)) + +def print_warning(print_obj): + ''' + WARNING Log-level + ''' + print "WARNING: " + str(print_obj) + logging.warning(str(print_obj)) + +def set_exit_handler(func): + signal.signal(signal.SIGTERM, func) + +def time_elapsed(): + elapsed_minutes = round((time.time() - time_start)/60, +1) + return elapsed_minutes + +def version(): + print("Version: " +VERSION) + logging_est() + set_exit_handler(on_exit) + sys.exit(1) + +def main(argv): + try: + logging_est() + global time_start + time_start = time.time() + dx_session_obj = GetSession() + engine_ip = "" + engine_pass = "" + old_engine_pass = "" + try: + opts,args = getopt.getopt(argv,"e:d:p:hv") + except getopt.GetoptError: + help() + for opt, arg in opts: + if opt == '-h': + help() + elif opt == '-e': + engine_ip = arg + elif opt == '-p': + engine_pass = arg + elif opt == '-d': + key_path = arg + '/engine_key.pub' + elif opt == '-v': + version() + + if (engine_ip == "" or engine_pass == "") : + help() + + dx_session_obj.serversess(engine_ip, 'sysadmin', + engine_pass, 'SYSTEM') + dx_session_obj.server_wait() + + sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) + system_info = system.get(sys_server) + print_info(system_info.ssh_public_key) + print_info("Writing to " + key_path) + target = open(key_path, 'w') + target.write(system_info.ssh_public_key) + target.close + print_info("File saved") + elapsed_minutes = time_elapsed() + print_info("Script took " + 
str(elapsed_minutes) + " minutes to get this far.") + + except SystemExit as e: + sys.exit(e) + except HttpError as e: + print_error("Connection failed to the Delphix Engine") + print_error( "Please check the ERROR message below") + print_error(e.message) + sys.exit(2) + except JobError as e: + print_error("A job failed in the Delphix Engine") + print_error(e.job) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(2) + except KeyboardInterrupt: + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(2) + except: + print_error(sys.exc_info()[0]) + print_error(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") + sys.exit(2) + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/v1_8_2/js_bookmark.py b/v1_8_2/js_bookmark.py new file mode 100755 index 0000000..dd77bbc --- /dev/null +++ b/v1_8_2/js_bookmark.py @@ -0,0 +1,512 @@ +#!/usr/bin/env python +# Program Name : js_bookmark.py +# Description : Delphix implementation script +# Author : Corey Brune +# Created: March 4 2016 +# +# Copyright (c) 2016 by Delphix. +# All rights reserved. +# See http://docs.delphix.com/display/PS/Copyright+Statement for details +# +# Delphix Support statement available at +# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details +# +# Warranty details provided in external file +# for customers who have purchased support. +# +"""Creates, lists, removes a Jet Stream Bookmark +Usage: + js_bookmark.py (--create_bookmark --data_layout [--branch_name | --activate_bookmark | --update_bookmark | --share_bookmark | --unshare_bookmark ) + [--engine | --all] [--parallel ] + [--poll ] [--debug] + [--config ] [--logdir ] + js_bookmark.py -h | --help | -v | --version + +Creates, Lists, Removes a Jet Stream Bookmark + +Examples: + js_bookmark.py --list_bookmarks + js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 + js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 --branch_name jsbranch1 + js_bookmark.py --activate_bookmark jsbookmark1 + js_bookmark.py --update_bookmark jsbookmark1 + js_bookmark.py --delete_bookmark jsbookmark1 + js_bookmark.py --share_bookmark jsbookmark1 + js_bookmark.py --unshare_bookmark jsbookmark1 + +Options: + --create_bookmark Name of the new JS Bookmark + --container_name Name of the container to use + --update_bookmark Name of the bookmark to update + --share_bookmark Name of the bookmark to share + --unshare_bookmark Name of the bookmark to unshare + --branch_name Optional: Name of the branch to use + --data_layout Name of the data layout (container or template) to use + --activate_bookmark Name of the bookmark to activate + --delete_bookmark Delete the JS Bookmark + --list_bookmarks List the bookmarks on a given engine + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./js_bookmark.log] + -h --help Show this screen. + -v --version Show version. 
+""" + +VERSION="v.0.0.015" + +from docopt import docopt +from os.path import basename +import sys +from time import sleep, time +import traceback + +from delphixpy.web import job +from delphixpy.web.jetstream import bookmark +from delphixpy.web.jetstream import branch +from delphixpy.web.jetstream import template +from delphixpy.web.jetstream import container +from delphixpy.web.jetstream import datasource +from delphixpy.web.vo import JSBookmarkCreateParameters +from delphixpy.web.vo import JSBookmark +from delphixpy.exceptions import RequestError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import HttpError + +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_obj_name +from lib.GetReferences import get_obj_reference +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug +from lib.DxLogging import print_exception + + +def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None): + """ + Create the JS Bookmark + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param bookmark_name: Name of the bookmark to create + :type bookmark_name: str + :param source_layout: Name of the source (template or container) to use + :type source_layout: str + :param branch_name: Name of the branch to use + :type branch_name: str + """ + + branch_ref = None + source_layout_ref = None + engine_name = dlpx_obj.dlpx_engines.keys()[0] + js_bookmark_params = JSBookmarkCreateParameters() + if branch_name: + if branch_name == 'master' or branch_name == 'default': + try: + source_layout_ref = find_obj_by_name(dlpx_obj.server_session, + template, + source_layout).reference + except DlpxException: + source_layout_ref = find_obj_by_name( + dlpx_obj.server_session, container, + source_layout).reference + for branch_obj in branch.get_all(dlpx_obj.server_session): + if branch_name == branch_obj.name and \ + source_layout_ref == branch_obj.data_layout: + branch_ref = branch_obj.reference + break + elif branch_name is None: + (source_layout_ref, branch_ref) = find_obj_by_name( + dlpx_obj.server_session, template, source_layout, True) + if branch_ref is None: + raise DlpxException('Could not find {} in engine {}'.format( + branch_name, engine_name)) + js_bookmark_params.bookmark = {'name': bookmark_name, 'branch': branch_ref, + 'type': 'JSBookmark'} + js_bookmark_params.timeline_point_parameters = { + 'sourceDataLayout': source_layout_ref, 'type': + 'JSTimelinePointLatestTimeInput'} + try: + bookmark.create(dlpx_obj.server_session, js_bookmark_params) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('JS Bookmark {} was created successfully.'.format( + bookmark_name)) + + except (DlpxException, RequestError, HttpError) as e: + print_exception('\nThe bookmark {} was not created. 
The error ' + 'was:\n\n{}'.format(bookmark_name, e)) + + +def list_bookmarks(dlpx_obj): + """ + List all bookmarks on a given engine + + :param dlpx_obj: Virtualization Engine session object + + """ + + header = '\nName, Reference, Branch Name, Template Name' + try: + js_bookmarks = bookmark.get_all(dlpx_obj.server_session) + print header + for js_bookmark in js_bookmarks: + branch_name = find_obj_name(dlpx_obj.server_session, branch, + js_bookmark.branch) + print '{}, {}, {}, {}'.format(js_bookmark.name, + js_bookmark.reference, branch_name, + js_bookmark.template_name) + print '\n' + + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: The bookmarks on could not be listed. The ' + 'error was:\n\n{}'.format(e)) + + +def unshare_bookmark(dlpx_obj, bookmark_name): + """ + Unshare a bookmark + + :param dlpx_obj: Virtualization Engine session object + :param bookmark_name: Name of the bookmark to share + """ + + try: + bookmark.unshare(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + bookmark, bookmark_name).pop()) + print_info('JS Bookmark {} was unshared successfully.'.format( + bookmark_name)) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: The bookmark {} could not be unshared. ' + 'The error was:\n\n{}'.format(bookmark_name, e)) + + +def share_bookmark(dlpx_obj, bookmark_name): + """ + Share a bookmark + + :param dlpx_obj: Virtualization Engine session object + :param bookmark_name: Name of the bookmark to share + """ + + try: + bookmark.share(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + bookmark, bookmark_name).pop()) + print_info('JS Bookmark {} was shared successfully.'.format( + bookmark_name)) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: The bookmark {} could not be shared. The ' + 'error was:\n\n{}'.format(bookmark_name, e)) + + +def update_bookmark(dlpx_obj, bookmark_name): + """ + Updates a bookmark + + :param dlpx_obj: Virtualization Engine session object + :param bookmark_name: Name of the bookmark to update + """ + + js_bookmark_obj = JSBookmark() + + try: + bookmark.update(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + bookmark, bookmark_name).pop(), + js_bookmark_obj) + + except (DlpxException, HttpError, RequestError) as e: + print_exception('ERROR: The bookmark {} could not be updated. The ' + 'error was:\n{}'.format(bookmark_name, e)) + + +def delete_bookmark(dlpx_obj, bookmark_name): + """ + Deletes a bookmark + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param bookmark_name: Bookmark to delete + :type bookmark_name: str + """ + + try: + bookmark.delete(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + bookmark, bookmark_name).pop()) + print_info('The bookmark {} was deleted successfully.'.format( + bookmark_name)) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: The bookmark {} was not deleted. The ' + 'error was:\n\n{}'.format(bookmark_name, e.message)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... 
+ t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target=func, args=args, kwargs=kwargs) + func_hl.start() + return func_hl + + return async_func + + +def time_elapsed(time_start): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + + :param time_start: start time of the script. + :type time_start: float + """ + return round((time() - time_start)/60, +1) + + +@run_async +def main_workflow(engine, dlpx_obj): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + """ + + try: + # Setup the connection to the Delphix Engine + dlpx_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + except DlpxException as e: + print_exception('ERROR: js_bookmark encountered an error authenticating' + ' to {} {}:\n{}\n'.format(engine['hostname'], + arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dlpx_obj.job_mode(single_thread): + while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: + if len(thingstodo) > 0: + if arguments['--create_bookmark']: + create_bookmark(dlpx_obj, + arguments['--create_bookmark'], + arguments['--data_layout'], + arguments['--branch_name'] + if arguments['--branch_name'] + else None) + elif arguments['--delete_bookmark']: + delete_bookmark(dlpx_obj, + arguments['--delete_bookmark']) + elif arguments['--update_bookmark']: + update_bookmark(dlpx_obj, + arguments['--update_bookmark']) + elif arguments['--share_bookmark']: + share_bookmark(dlpx_obj, + arguments['--share_bookmark']) + elif arguments['--unshare_bookmark']: + unshare_bookmark(dlpx_obj, + arguments['--unshare_bookmark']) + elif arguments['--list_bookmarks']: + list_bookmarks(dlpx_obj) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dlpx_obj.jobs.keys(): + job_obj = job.get(dlpx_obj.server_session, + dlpx_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Running JS Bookmark: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the running jobs list. + del dlpx_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dlpx_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + except (DlpxException, RequestError, JobError, HttpError) as e: + print_exception('Error in js_bookmark: {}\n{}'.format( + engine['hostname'], e)) + sys.exit(1) + + +def run_job(dlpx_obj, config_file_path): + """ + This function runs the main_workflow aynchronously against all the + servers specified + + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession + :param config_file_path: string containing path to configuration file. + :type config_file_path: str + """ + + # Create an empty list to store threads we create. 
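+    # A sketch of what dlpx_obj.dlpx_engines is expected to look like after
+    # dlpx_obj.get_config(config_file_path): a dict keyed by engine identifier
+    # whose entries carry at least the keys read below and in main_workflow().
+    # The entry below is hypothetical and for illustration only:
+    #   {'myve01': {'hostname': 'myve01', 'ip_address': '10.0.1.10',
+    #               'username': 'delphix_admin', 'password': 'changeme',
+    #               'default': 'true'}}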
+ threads = [] + engine = None + + # If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info('Executing against all Delphix Engines in the dxtools.conf') + try: + # For each server in the dxtools.conf... + for delphix_engine in dlpx_obj.dlpx_engines: + engine = dlpx_obj.dlpx_engines[delphix_engine] + # Create a new thread and add it to the list. + threads.append(main_workflow(engine, dlpx_obj)) + except DlpxException as e: + print_exception('Error encountered in run_job():\n{}'.format(e)) + sys.exit(1) + + elif arguments['--all'] is False: + # Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dlpx_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + except (DlpxException, RequestError, KeyError): + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value and' + ' try again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + else: + # Else search for a default engine in the dxtools.conf + for delphix_engine in dlpx_obj.dlpx_engines: + if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': + engine = dlpx_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) + break + + if engine is None: + raise DlpxException('\nERROR: No default engine found. Exiting') + + # run the job against the engine + threads.append(main_workflow(engine, dlpx_obj)) + + # For each thread in the list... + for each in threads: + # join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def main(): + # We want to be able to call on these variables anywhere in the script. + global single_thread + global debug + + time_start = time() + single_thread = False + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + config_file_path = arguments['--config'] + # Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + # This is the function that will handle processing main_workflow for + # all the servers. + run_job(dx_session_obj, config_file_path) + + elapsed_minutes = time_elapsed(time_start) + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + # Here we handle what we do when the unexpected happens + except SystemExit as e: + # This is what we use to handle our sys.exit(#) + sys.exit(e) + + except DlpxException as e: + # We use this exception handler when an error occurs in a function call. + print_exception('ERROR: Please check the ERROR message below:\n' + '{}'.format(e.message)) + sys.exit(2) + + except HttpError as e: + # We use this exception handler when our connection to Delphix fails + print_exception('ERROR: Connection failed to the Delphix Engine. 
Please' + 'check the ERROR message below:\n{}'.format(e.message)) + sys.exit(2) + + except JobError as e: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) + elapsed_minutes = time_elapsed(time_start) + print_exception('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(3) + + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + print_debug('You sent a CTRL+C to interrupt the process') + elapsed_minutes = time_elapsed(time_start) + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + except: + # Everything else gets caught here + print_exception('{}\n{}'.format(sys.exc_info()[0], + traceback.format_exc())) + elapsed_minutes = time_elapsed(time_start) + print_info("{} took {:.2f} minutes to get this far".format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + +if __name__ == "__main__": + # Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + # Feed our arguments to the main function, and off we go! + main() diff --git a/v1_8_2/js_branch.py b/v1_8_2/js_branch.py new file mode 100755 index 0000000..383e849 --- /dev/null +++ b/v1_8_2/js_branch.py @@ -0,0 +1,465 @@ +#!/usr/bin/env python +# Program Name : js_branch.py +# Description : Delphix implementation script +# Author : Corey Brune +# Created: March 4 2016 +# +# Copyright (c) 2016 by Delphix. +# All rights reserved. +# See http://docs.delphix.com/display/PS/Copyright+Statement for details +# +# Delphix Support statement available at +# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details +# +# Warranty details provided in external file +# for customers who have purchased support. +# +"""Creates, updates, deletes, activates and lists branches +Usage: + js_branch.py (--create_branch --container_name --template_name | --list_branches | --delete_branch | --activate_branch | --update_branch ) + [--engine | --all] [--parallel ] + [--poll ] [--debug] + [--config ] [--logdir ] + js_branch.py -h | --help | -v | --version + +Creates, Lists, Removes a Jet Stream Branch + +Examples: + js_branch.py --list_branches + js_branch.py --create_branch jsbranch1 --container_name jscontainer --template_name jstemplate1 + js_branch.py --activate_branch jsbranch1 + js_branch.py --delete_branch jsbranch1 + js_branch.py --update_branch jsbranch1 + +Options: + --create_branch Name of the new JS Branch + --container_name Name of the container to use + --update_branch Name of the branch to update + --template_name Name of the template to use + --activate_branch Name of the branch to activate + --delete_branch Delete the JS Branch + --list_branches List the branchs on a given engine + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./js_branch.log] + -h --help Show this screen. + -v --version Show version. 
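+
+The examples above use the default engine in dxtools.conf; pass a specific
+engine name (the name "myve01" below is hypothetical) or run against every
+configured engine with the "all" flag:
+    js_branch.py --list_branches --engine myve01
+    js_branch.py --list_branches --all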
+""" + +VERSION="v.0.0.010" + +from docopt import docopt +from os.path import basename +import sys +import traceback +import re +from time import time, sleep + +from delphixpy.web import job +from delphixpy.web.jetstream import branch +from delphixpy.web.jetstream import container +from delphixpy.web.jetstream import template +from delphixpy.web.jetstream import operation +from delphixpy.web.vo import JSBranchCreateParameters +from delphixpy.web.vo import JSBranch +from delphixpy.exceptions import RequestError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import HttpError + +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import find_obj_name +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug +from lib.DxLogging import print_exception + + +def create_branch(dlpx_obj, branch_name, template_name, container_name): + """ + Create the JS Branch + + dlpx_obj: Virtualization Engine session object + branch_name: Name of the branch to create + template_name: Name of the template to use + container_name: Name of the container to use + """ + + js_branch_params = JSBranchCreateParameters() + js_branch_params.name = branch_name + engine_name = dlpx_obj.dlpx_engines.keys()[0] + try: + data_container_obj = find_obj_by_name(dlpx_obj.server_session, + container, container_name) + source_layout_obj = find_obj_by_name(dlpx_obj.server_session, + template, template_name) + js_branch_params.data_container = data_container_obj.reference + js_branch_params.timeline_point_parameters = { + 'sourceDataLayout': + source_layout_obj.reference, + 'type': + 'JSTimelinePointLatestTimeInput'} + branch.create(dlpx_obj.server_session, js_branch_params) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('JS Branch {} was created successfully.'.format( + branch_name)) + except (DlpxException, RequestError, HttpError) as e: + print_exception('\nThe branch was not created. The error was:' + '\n{}'.format(e)) + + +def list_branches(dlpx_obj): + """ + List all branches on a given engine + + dlpx_obj: Virtualization Engine session object + No args required + """ + + try: + header = '\nBranch Name, Data Layout, Reference, End Time' + js_data_layout = '' + js_branches = branch.get_all(dlpx_obj.server_session) + + print header + for js_branch in js_branches: + js_end_time = operation.get(dlpx_obj.server_session, + js_branch.first_operation).end_time + if re.search('TEMPLATE', js_branch.data_layout): + js_data_layout = find_obj_name(dlpx_obj.server_session, + template, js_branch.data_layout) + elif re.search('CONTAINER', js_branch.data_layout): + js_data_layout = find_obj_name(dlpx_obj.server_session, + container, js_branch.data_layout) + print_info('{} {}, {}, {}'.format(js_branch._name[0], + js_data_layout, + js_branch.reference, + js_end_time)) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: JS Branches could not be listed. 
The ' + 'error was:\n\n{}'.format(e)) + + +def update_branch(dlpx_obj, branch_name): + """ + Updates a branch + + dlpx_obj: Virtualization Engine session object + branch_name: Name of the branch to update + """ + + js_branch_obj = JSBranch() + try: + branch_obj = find_obj_by_name(dlpx_obj.server_session, + branch, branch_name) + branch.update(dlpx_obj.server_session, branch_obj.reference, + js_branch_obj) + print_info('The branch {} was updated successfully.'.format( + branch_name)) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: The branch could not be updated. The ' + 'error was:\n\n{}'.format(e)) + + +def activate_branch(dlpx_obj, branch_name): + """ + Activates a branch + + dlpx_obj: Virtualization Engine session object + branch_name: Name of the branch to activate + """ + + engine_name = dlpx_obj.dlpx_engines.keys()[0] + try: + branch_obj = find_obj_by_name(dlpx_obj.server_session, + branch, branch_name) + branch.activate(dlpx_obj.server_session, branch_obj.reference) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('The branch {} was activated successfully.'.format( + branch_name)) + except RequestError as e: + print_exception('\nAn error occurred activating the ' + 'branch:\n{}'.format(e)) + + +def delete_branch(dlpx_obj, branch_name): + """ + Deletes a branch + dlpx_obj: Virtualization Engine session object + branch_name: Branch to delete + """ + + try: + branch_obj = find_obj_by_name(dlpx_obj.server_session, + branch, branch_name) + branch.delete(dlpx_obj.server_session, branch_obj.reference) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: The branch was not deleted. The ' + 'error was:\n\n{}'.format(e.message)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + return round((time() - time_start)/60, +1) + + +@run_async +def main_workflow(engine, dlpx_obj): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. 
+ The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + + engine: Dictionary of engines + dlpx_obj: Virtualization Engine session object + """ + + #Establish these variables as empty for use later + environment_obj = None + source_objs = None + + try: + #Setup the connection to the Delphix Engine + dlpx_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while ' + 'provisioning {}:\n{}\n'.format(engine['hostname'], + arguments['--target'], + e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dlpx_obj.job_mode(single_thread): + while (len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0): + if len(thingstodo) > 0: + if arguments['--create_branch']: + create_branch(dlpx_obj, arguments['--create_branch'], + arguments['--template_name'], + arguments['--container_name']) + elif arguments['--delete_branch']: + delete_branch(dlpx_obj, arguments['--delete_branch']) + elif arguments['--update_branch']: + update_branch(dlpx_obj, arguments['--update_branch']) + elif arguments['--activate_branch']: + activate_branch(dlpx_obj, + arguments['--activate_branch']) + elif arguments['--list_branches']: + list_branches(dlpx_obj) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dlpx_obj.jobs.keys(): + job_obj = job.get(dlpx_obj.server_session, + dlpx_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Provisioning JS Branch: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the running jobs list. + del dlpx_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dlpx_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + except (DlpxException, RequestError, JobError, HttpError) as e: + print_exception('\nError in js_branch: {}\n{}'.format( + engine['hostname'], e)) + + +def run_job(dlpx_obj, config_file_path): + """ + This function runs the main_workflow aynchronously against all the + servers specified + + dlpx_obj: Virtualization Engine session object + config_file_path: path containing the dxtools.conf file. + """ + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + try: + #For each server in the dxtools.conf... + for delphix_engine in dlpx_obj.dlpx_engines: + engine = dlpx_obj.dlpx_engines[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine, dlpx_obj)) + + except DlpxException as e: + print_exception('Error encountered in run_job():\n{}'.format(e)) + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dlpx_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + + except (DlpxException, RequestError, KeyError) as e: + print_exception('\nERROR: Delphix Engine {} cannot be found' + ' in {}. 
Please check your value and try' + ' again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dlpx_obj.dlpx_engines: + if dlpx_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + + engine = dlpx_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) + break + + if engine is None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine, dlpx_obj)) + + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def main(): + #We want to be able to call on these variables anywhere in the script. + global single_thread + global time_start + global debug + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job(dx_session_obj, config_file_path) + elapsed_minutes = time_elapsed() + print_info('Script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + #This is what we use to handle our sys.exit(#) + sys.exit(e) + + except DlpxException as e: + #We use this exception handler when an error occurs in a function call. + + print('\nERROR: Please check the ERROR message below:\n{}'.format( + e.message)) + sys.exit(2) + + except HttpError as e: + #We use this exception handler when our connection to Delphix fails + + print('\nERROR: Connection failed to the Delphix Engine. Please ' + 'check the ERROR message below:\n{}'.format(e.message)) + sys.exit(2) + + except JobError as e: + #We use this exception handler when a job fails in Delphix so that we + #have actionable data + + print('A job failed in the Delphix Engine:\n{}'.format(e.job)) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(3) + + except KeyboardInterrupt: + #We use this exception handler to gracefully handle ctrl+c exits + + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + + except: + #Everything else gets caught here + + print(sys.exc_info()[0]) + print(traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + #Feed our arguments to the main function, and off we go! 
+ main() diff --git a/v1_8_2/js_container.py b/v1_8_2/js_container.py new file mode 100755 index 0000000..38c1ec1 --- /dev/null +++ b/v1_8_2/js_container.py @@ -0,0 +1,608 @@ +#!/usr/bin/env python +# Program Name : js_container.py +# Description : Delphix implementation script +# Author : Corey Brune +# Created: March 4 2016 +# +# Copyright (c) 2016 by Delphix. +# All rights reserved. +# See http://docs.delphix.com/display/PS/Copyright+Statement for details +# +# Delphix Support statement available at +# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details +# +# Warranty details provided in external file +# for customers who have purchased support. +# +"""Create, delete, refresh and list JS containers. +Usage: + js_container.py (--create_container --template_name --database | --list_hierarchy | --list | --delete_container [--keep_vdbs]| --refresh_container | --add_owner --container_name | --remove_owner --container_name | --restore_container --bookmark_name ) + [--engine | --all] [--parallel ] + [--poll ] [--debug] + [--config ] [--logdir ] + js_container.py -h | --help | -v | --version + +Creates, Lists, Removes a Jet Stream Template + +Examples: + js_container.py --list + js_container.py --list_hierarchy jscontainer1 + js_container.py --add_owner jsuser + js_container.py --create_container jscontainer1 --database --template_name jstemplate1 + js_container.py --delete_container jscontainer1 + js_container.py --refresh_container jscontainer1 + js_container.py --add_owner jsuser --container_name jscontainer1 + js_container.py --remove_owner jsuser --container_name jscontainer1 + js_container.py --refresh_container jscontainer1 + js_container.py --restore_container jscontainer1 --bookmark_name jsbookmark1 + +Options: + --create_container Name of the new JS Container + --container_name Name of the JS Container + --refresh_container Name of the new JS Container + --restore_container Name of the JS Container to restore + --template_name Name of the JS Template to use for the container + --add_owner Name of the JS Owner for the container + --remove_owner Name of the JS Owner to remove + --bookmark_name Name of the JS Bookmark to restore the container + --keep_vdbs If set, deleting the container will not remove + the underlying VDB(s) + --list_hierarchy Lists hierarchy of a given container name + --delete_container Delete the JS Container + --database Name of the child database(s) to use for the + JS Container + --list_containers List the containers on a given engine + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./js_container.log] + -h --help Show this screen. + -v --version Show version. 
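+
+A complete create example (the database and template names are hypothetical);
+to add more than one data source, separate database names with a colon (:):
+    js_container.py --create_container jscontainer1 --database VDB_dev1:VDB_dev2 --template_name jstemplate1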
+""" + +VERSION = "v.0.0.015" + +from os.path import basename +import sys +import traceback +from time import sleep, time +from docopt import docopt + +from delphixpy.web.jetstream import container +from delphixpy.web.jetstream import bookmark +from delphixpy.web.jetstream import template +from delphixpy.web.jetstream import datasource +from delphixpy.web import database +from delphixpy.web import user +from delphixpy.web import job +from delphixpy.web.vo import JSDataContainerCreateParameters +from delphixpy.web.vo import JSDataSourceCreateParameters +from delphixpy.web.vo import JSTimelinePointBookmarkInput +from delphixpy.web.vo import JSDataContainerModifyOwnerParameters +from delphixpy.web.vo import JSDataContainerDeleteParameters +from delphixpy.exceptions import RequestError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import HttpError + +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import get_obj_reference +from lib.GetReferences import find_obj_name +from lib.GetReferences import convert_timestamp +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_exception +from lib.DxLogging import print_debug + + +def create_container(dlpx_obj, template_name, container_name, database_name): + """ + Create the JS container + + dlpx_obj: Virtualization Engine session object + container_name: Name of the container to create + database_name: Name of the database(s) to use in the container + """ + + js_container_params = JSDataContainerCreateParameters() + container_ds_lst = [] + engine_name = dlpx_obj.dlpx_engines.keys()[0] + for db in database_name.split(':'): + container_ds_lst.append(build_ds_params(dlpx_obj, database, db)) + + try: + js_template_obj = find_obj_by_name(dlpx_obj.server_session, + template, template_name) + js_container_params.template = js_template_obj.reference + js_container_params.timeline_point_parameters = { + 'sourceDataLayout': js_template_obj.reference, + 'type': 'JSTimelinePointLatestTimeInput'} + js_container_params.data_sources = container_ds_lst + js_container_params.name = container_name + container.create(dlpx_obj.server_session, js_container_params) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('JS Container {} was created successfully.'.format( + container_name)) + except (DlpxException, RequestError, HttpError) as e: + print_exception('Container {} was not created. The error ' + 'was:\n{}\n'.format(container_name, e)) + + +def remove_owner(dlpx_obj, owner_name, container_name): + """ + Removes an owner from a container + + dlpx_obj: Virtualization Engine session object + owner_name: Name of the owner to remove + container_name: Name of the container + """ + + owner_params = JSDataContainerModifyOwnerParameters() + try: + user_ref = find_obj_by_name(dlpx_obj.server_session, + user, owner_name).reference + owner_params.owner = user_ref + container_obj = find_obj_by_name(dlpx_obj.server_session, + container, container_name) + container.remove_owner(dlpx_obj.server_session, + container_obj.reference, owner_params) + print_info('User {} was granted access to {}'.format(owner_name, + container_name)) + except (DlpxException, RequestError, HttpError) as e: + print_exception('The user was not added to container {}. 
The ' + 'error was:\n{}\n'.format(container_name, e)) + + +def restore_container(dlpx_obj, container_name, bookmark_name): + """ + Restores a container to a given JS bookmark + + dlpx_obj: Virtualization Engine session object + container_name: Name of the container + bookmark_name: Name of the bookmark to restore + """ + bookmark_params = JSTimelinePointBookmarkInput() + bookmark_params.bookmark = get_obj_reference(dlpx_obj.server_session, + bookmark, bookmark_name).pop() + engine_name = dlpx_obj.dlpx_engines.keys()[0] + try: + container.restore(dlpx_obj.server_session, get_obj_reference( + dlpx_obj.server_session, container, container_name).pop(), + bookmark_params) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('Container {} was restored successfully with ' + 'bookmark {}'.format(container_name, bookmark_name)) + except (DlpxException, RequestError, HttpError) as e: + print_exception('The user was not added to container {}. The ' + 'error was:\n{}\n'.format(container_name, e)) + + +def add_owner(dlpx_obj, owner_name, container_name): + """ + Adds an owner to a container + + dlpx_obj: Virtualization Engine session object + owner_name: Grant authorizations for the given user on this container and + parent template + container_name: Name of the container + """ + + owner_params = JSDataContainerModifyOwnerParameters() + try: + owner_params.owner = get_obj_reference(dlpx_obj.server_session, + user, owner_name).pop() + container.add_owner(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + container, container_name).pop(), + owner_params) + print_info('User {} was granted access to {}'.format(owner_name, + container_name)) + except (DlpxException, RequestError, HttpError) as e: + print_exception('The user was not added to container {}. The error' + ' was:\n{}\n'.format(container_name, e)) + + +def refresh_container(dlpx_obj, container_name): + """ + Refreshes a container + + dlpx_obj: Virtualization Engine session object + container_name: Name of the container to refresh + """ + + engine_name = dlpx_obj.dlpx_engines.keys()[0] + try: + container.refresh(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + container, container_name).pop()) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('The container {} was refreshed.'.format(container_name)) + except (DlpxException, RequestError, HttpError) as e: + print_exception('\nContainer {} was not refreshed. The error ' + 'was:\n{}\n'.format(container_name, e)) + + +def delete_container(dlpx_obj, container_name, keep_vdbs=False): + """ + Deletes a container + + dlpx_obj: Virtualization Engine session object + container_name: Container to delete + """ + + try: + if keep_vdbs: + js_container_params = JSDataContainerDeleteParameters() + js_container_params.delete_data_sources = False + container.delete(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + container, container_name).pop(), + js_container_params) + elif keep_vdbs is False: + container.delete(dlpx_obj.server_session, + get_obj_reference(dlpx_obj.server_session, + container, container_name).pop()) + except (DlpxException, RequestError, HttpError) as e: + print_exception('\nContainer {} was not deleted. 
The error ' + 'was:\n{}\n'.format(container_name, e)) + + +def list_containers(dlpx_obj): + """ + List all containers on a given engine + + dlpx_obj: Virtualization Engine session object + """ + + header = 'Name, Active Branch, Owner, Reference, Template, Last Updated' + js_containers = container.get_all(dlpx_obj.server_session) + try: + print header + for js_container in js_containers: + last_updated = convert_timestamp(dlpx_obj.server_session, + js_container.last_updated[:-5]) + print_info('{}, {}, {}, {}, {}, {}'.format(js_container.name, + js_container.active_branch, str(js_container.owner), + str(js_container.reference), str(js_container.template), + last_updated)) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: JS Containers could not be listed. The ' + 'error was:\n\n{}'.format(e)) + + +def list_hierarchy(dlpx_obj, container_name): + """ + Filter container listing. + + dlpx_obj: Virtualization Engine session object + container_name: Name of the container to list child VDBs. + """ + + database_dct = {} + layout_ref = find_obj_by_name(dlpx_obj.server_session, container, + container_name).reference + for ds in datasource.get_all(dlpx_obj.server_session, + data_layout=layout_ref): + db_name = (find_obj_name(dlpx_obj.server_session, + database, ds.container)) + if hasattr(ds.runtime, 'jdbc_strings'): + database_dct[db_name] = ds.runtime.jdbc_strings + else: + database_dct[db_name] = 'None' + try: + print_info('Container: {}\nRelated VDBs: {}\n'.format( + container_name, convert_dct_str(database_dct))) + except (AttributeError, DlpxException) as e: + print_exception(e) + + +def convert_dct_str(obj_dct): + """ + Convert dictionary into a string for printing + + obj_dct: Dictionary to convert into a string + :return: string object + """ + js_str = '' + + if isinstance(obj_dct, dict): + for js_db, js_jdbc in obj_dct.iteritems(): + if isinstance(js_jdbc, list): + js_str += '{}: {}\n'.format(js_db, ', '.join(js_jdbc)) + elif isinstance(js_jdbc, str): + js_str += '{}: {}\n'.format(js_db, js_jdbc) + else: + raise DlpxException('Passed a non-dictionary object to ' + 'convert_dct_str(): {}'.format(type(obj_dct))) + return js_str + + +def build_ds_params(dlpx_obj, obj, db): + """ + Builds the datasource parameters + + dlpx_obj: Virtualization Engine session object + obj: object type to use when finding db + db: Name of the database to use when building the parameters + """ + ds_params = JSDataSourceCreateParameters() + ds_params.source = {'type': 'JSDataSource', 'name': db} + try: + db_obj = find_obj_by_name(dlpx_obj.server_session, obj, db) + ds_params.container = db_obj.reference + return ds_params + except RequestError as e: + print_exception('\nCould not find {}\n{}'.format(db, e.message)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target=func, args=args, kwargs=kwargs) + func_hl.start() + return func_hl + + return async_func + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. 
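+    (For example, a call made 90 seconds after time_start returns 1.5.)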
+ Call this anywhere you want to note the progress in terms of time + """ + return round((time() - time_start)/60, +1) + + +@run_async +def main_workflow(engine, dlpx_obj): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + + engine: Dictionary of engines + dlpx_obj: Virtualization Engine session object + """ + + try: + #Setup the connection to the Delphix Engine + dlpx_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while ' + 'creating the session:\n{}\n'.format( + dlpx_obj.dlpx_engines['hostname'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dlpx_obj.job_mode(single_thread): + while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: + if len(thingstodo) > 0: + if arguments['--create_container']: + create_container(dlpx_obj, + arguments['--template_name'], + arguments['--create_container'], + arguments['--database']) + elif arguments['--delete_container']: + delete_container(dlpx_obj, + arguments['--delete_container'], + arguments['--keep_vdbs']) + elif arguments['--list']: + list_containers(dlpx_obj) + elif arguments['--remove_owner']: + remove_owner(dlpx_obj, arguments['--remove_owner'], + arguments['--container_name']) + elif arguments['--restore_container']: + restore_container(dlpx_obj, + arguments['--restore_container'], + arguments['--bookmark_name']) + elif arguments['--add_owner']: + add_owner(dlpx_obj, arguments['--add_owner'], + arguments['--container_name']) + elif arguments['--refresh_container']: + refresh_container(dlpx_obj, + arguments['--refresh_container']) + elif arguments['--list_hierarchy']: + list_hierarchy(dlpx_obj, arguments['--list_hierarchy']) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dlpx_obj.jobs.keys(): + job_obj = job.get(dlpx_obj.server_session, + dlpx_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: JS Container operations: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the + # running jobs list. + del dlpx_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. + if len(dlpx_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + + except (DlpxException, RequestError, JobError, HttpError) as e: + print '\nError in js_container: {}:\n{}'.format(engine['hostname'], e) + sys.exit(1) + + +def run_job(dlpx_obj, config_file_path): + """ + This function runs the main_workflow aynchronously against all the + servers specified + + dlpx_obj: Virtualization Engine session object + config_file_path: filename of the configuration file for virtualization + engines + """ + + #Create an empty list to store threads we create. + threads = [] + engine = None + + #If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info("Executing against all Delphix Engines in the dxtools.conf") + + try: + #For each server in the dxtools.conf... 
+ for delphix_engine in dlpx_obj.dlpx_engines: + engine = dlpx_obj.dlpx_engines[delphix_engine] + #Create a new thread and add it to the list. + threads.append(main_workflow(engine, dlpx_obj)) + + except DlpxException as e: + print 'Error encountered in run_job():\n{}'.format(e) + sys.exit(1) + + elif arguments['--all'] is False: + #Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dlpx_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + except (DlpxException, RequestError, KeyError) as e: + print_exception('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + else: + #Else search for a default engine in the dxtools.conf + for delphix_engine in dlpx_obj.dlpx_engines: + if dlpx_obj.dlpx_engines[delphix_engine]['default'] == \ + 'true': + engine = dlpx_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) + break + if engine is None: + raise DlpxException("\nERROR: No default engine found. Exiting") + + #run the job against the engine + threads.append(main_workflow(engine, dlpx_obj)) + #For each thread in the list... + for each in threads: + #join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def main(): + """ + Main function - setup global variables and timer + """ + #We want to be able to call on these variables anywhere in the script. + global single_thread + global time_start + global debug + + if arguments['--debug']: + debug = True + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + single_thread = False + config_file_path = arguments['--config'] + #Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + #This is the function that will handle processing main_workflow for + # all the servers. + run_job(dx_session_obj, config_file_path) + + elapsed_minutes = time_elapsed() + print_info('script took {:.2f} minutes to get this far.'.format( + elapsed_minutes)) + + #Here we handle what we do when the unexpected happens + except SystemExit as e: + #This is what we use to handle our sys.exit(#) + sys.exit(e) + except DlpxException as e: + #We use this exception handler when an error occurs in a function call. + print_exception('\nERROR: Please check the ERROR message ' + 'below:\n{}'.format(e.message)) + sys.exit(2) + except HttpError as e: + #We use this exception handler when our connection to Delphix fails + print '\nERROR: Connection failed to the Delphix Engine. 
Please ' \ + 'check the ERROR message below:\n{}'.format(e.message) + sys.exit(2) + except JobError as e: + #We use this exception handler when a job fails in Delphix so that we + #have actionable data + print 'A job failed in the Delphix Engine:\n{}'.format(e.job) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(3) + except KeyboardInterrupt: + #We use this exception handler to gracefully handle ctrl+c exits + print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + except: + #Everything else gets caught here + print sys.exc_info()[0] + print traceback.format_exc() + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + + +if __name__ == "__main__": + #Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + #Feed our arguments to the main function, and off we go! + main() diff --git a/v1_8_2/js_template.py b/v1_8_2/js_template.py new file mode 100755 index 0000000..ed63221 --- /dev/null +++ b/v1_8_2/js_template.py @@ -0,0 +1,413 @@ +#!/usr/bin/env python +# Program Name : js_template.py +# Description : Delphix implementation script +# Author : Corey Brune +# Created: March 4 2016 +# +# Copyright (c) 2016 by Delphix. +# All rights reserved. +# See http://docs.delphix.com/display/PS/Copyright+Statement for details +# +# Delphix Support statement available at +# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details +# +# Warranty details provided in external file +# for customers who have purchased support. +# +"""Creates, deletes and lists JS templates. +Usage: + js_template.py (--create_template --database | --list_templates | --delete_template ) + [--engine | --all] [--parallel ] + [--poll ] [--debug] + [--config ] [--logdir ] + js_template.py -h | --help | -v | --version + +Creates, Lists, Removes a Jet Stream Template + +Examples: + js_template.py --list_templates + js_template.py --create_template jstemplate1 --database + js_template.py --create_template jstemplate2 --database + js_template.py --delete_template jstemplate1 + +Options: + --create_template Name of the new JS Template + --delete_template Delete the JS Template + --database Name of the database(s) to use for the JS Template + Note: If adding multiple template DBs, use a + comma (:) to delineate between the DB names. + --list_templates List the templates on a given engine + --engine Alt Identifier of Delphix engine in dxtools.conf. + --all Run against all engines. + --debug Enable debug logging + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./js_template.log] + -h --help Show this screen. + -v --version Show version. 
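+
+A complete multi-database example (the database names are hypothetical); as
+the note above intends, separate the names with a colon (:):
+    js_template.py --create_template jstemplate3 --database dbprod1:dbprod2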
+""" + +VERSION="v.0.0.015" + +from docopt import docopt +from os.path import basename +import sys +import traceback +from time import time, sleep + +from delphixpy.web import job +from delphixpy.web.jetstream import template +from delphixpy.web import database +from delphixpy.web.vo import JSDataTemplateCreateParameters +from delphixpy.web.vo import JSDataSourceCreateParameters +from delphixpy.exceptions import RequestError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import HttpError + +from lib.DlpxException import DlpxException +from lib.GetSession import GetSession +from lib.GetReferences import find_obj_by_name +from lib.GetReferences import convert_timestamp +from lib.DxLogging import logging_est +from lib.DxLogging import print_info +from lib.DxLogging import print_debug +from lib.DxLogging import print_exception + + +def create_template(dlpx_obj, template_name, database_name): + """ + Create the JS Template + + dlpx_obj: Virtualization Engine session object + template_name: Name of the template to create + database_name: Name of the database(s) to use in the template + """ + + js_template_params = JSDataTemplateCreateParameters() + js_template_params.name = template_name + template_ds_lst = [] + engine_name = dlpx_obj.dlpx_engines.keys()[0] + + for db in database_name.split(':'): + template_ds_lst.append(build_ds_params(dlpx_obj, database, db)) + try: + js_template_params.data_sources = template_ds_lst + js_template_params.type = 'JSDataTemplateCreateParameters' + template.create(dlpx_obj.server_session, js_template_params) + dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job + print_info('Template {} was created successfully.\n'.format( + template_name)) + except (DlpxException, RequestError, HttpError) as e: + print_exception('\nThe template {} was not created. The error ' + 'was:\n\n{}'.format(template_name, e)) + + +def list_templates(dlpx_obj): + """ + List all templates on a given engine + + dlpx_obj: Virtualization Engine session object + """ + + header = 'Name, Reference, Active Branch, Last Updated' + + try: + print header + js_templates = template.get_all(dlpx_obj.server_session) + for js_template in js_templates: + last_updated = convert_timestamp(dlpx_obj.server_session, + js_template.last_updated[:-5]) + print_info('{}, {}, {}, {}'.format(js_template.name, + js_template.reference, + js_template.active_branch, + last_updated)) + except (DlpxException, HttpError, RequestError) as e: + raise DlpxException('\nERROR: The templates could not be listed. ' + 'The error was:\n\n{}'.format(e.message)) + + +def delete_template(dlpx_obj, template_name): + """ + Deletes a template + + dlpx_obj: Virtualization Engine session object + template_name: Template to delete + """ + + try: + template_obj = find_obj_by_name(dlpx_obj.server_session, + template, template_name) + template.delete(dlpx_obj.server_session, + template_obj.reference) + print 'Template {} is deleted.'.format(template_name) + except (DlpxException, HttpError, RequestError) as e: + print_exception('\nERROR: The template {} was not deleted. 
The' + ' error was:\n\n{}'.format(template_name, e.message)) + + +def build_ds_params(dlpx_obj, obj, db): + """ + Builds the datasource parameters + + dlpx_obj: Virtualization Engine session object + obj: object type to use when finding db + db: Name of the database to use when building the parameters + """ + + try: + db_obj = find_obj_by_name(dlpx_obj.server_session, + obj, db) + ds_params = JSDataSourceCreateParameters() + ds_params.source = {'type':'JSDataSource', 'name': db} + ds_params.container = db_obj.reference + return ds_params + except RequestError as e: + print_exception('\nCould not find {}\n{}'.format(db, e.message)) + + +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target = func, args = args, kwargs = kwargs) + func_hl.start() + return func_hl + + return async_func + + +def time_elapsed(): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + """ + return round((time() - time_start)/60, +1) + + +@run_async +def main_workflow(engine, dlpx_obj): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + + engine: Dictionary of engines + dlpx_obj: Virtualization Engine session object + """ + + try: + #Setup the connection to the Delphix Engine + dlpx_obj.serversess(engine['ip_address'], engine['username'], + engine['password']) + except DlpxException as e: + print_exception('\nERROR: Engine {} encountered an error while ' + 'provisioning {}:\n{}\n'.format( + dlpx_obj.engine['hostname'], arguments['--target'], e)) + sys.exit(1) + + thingstodo = ["thingtodo"] + try: + with dlpx_obj.job_mode(single_thread): + while (len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0): + if len(thingstodo) > 0: + if arguments['--create_template']: + create_template(dlpx_obj, + arguments['--create_template'], + arguments['--database']) + elif arguments['--delete_template']: + delete_template(dlpx_obj, + arguments['--delete_template']) + elif arguments['--list_templates']: + list_templates(dlpx_obj) + thingstodo.pop() + # get all the jobs, then inspect them + i = 0 + for j in dlpx_obj.jobs.keys(): + job_obj = job.get(dlpx_obj.server_session, + dlpx_obj.jobs[j]) + print_debug(job_obj) + print_info('{}: Provisioning JS Template: {}'.format( + engine['hostname'], job_obj.job_state)) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the running jobs list. + del dlpx_obj.jobs[j] + elif job_obj.job_state in 'RUNNING': + # If the job is in a running state, increment the + # running job count. + i += 1 + print_info('{}: {:d} jobs running.'.format( + engine['hostname'], i)) + # If we have running jobs, pause before repeating the + # checks. 
+ if len(dlpx_obj.jobs) > 0: + sleep(float(arguments['--poll'])) + except (DlpxException, RequestError, JobError, HttpError) as e: + print_exception('\nError in js_template: {}:\n{}'.format( + engine['hostname'], e)) + sys.exit(1) + + +def run_job(dlpx_obj, config_file_path): + """ + This function runs the main_workflow aynchronously against all the + servers specified + + dlpx_obj: Virtualization Engine session object + config_file_path: path containing the dxtools.conf file. + """ + # Create an empty list to store threads we create. + threads = [] + engine = None + + # If the --all argument was given, run against every engine in dxtools.conf + if arguments['--all']: + print_info('Executing against all Delphix Engines in the dxtools.conf') + + try: + # For each server in the dxtools.conf... + for delphix_engine in dlpx_obj.dlpx_engines: + engine = dlpx_obj.dlpx_engines[delphix_engine] + # Create a new thread and add it to the list. + threads.append(main_workflow(engine, dlpx_obj)) + except DlpxException as e: + print 'Error encountered in run_job():\n{}'.format(e) + sys.exit(1) + elif arguments['--all'] is False: + # Else if the --engine argument was given, test to see if the engine + # exists in dxtools.conf + if arguments['--engine']: + try: + engine = dlpx_obj.dlpx_engines[arguments['--engine']] + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) + + except (DlpxException, RequestError, KeyError): + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' 'found in %s. Please check your value ' + 'and try again. Exiting.\n'.format( + arguments['--engine'], config_file_path)) + else: + # Else search for a default engine in the dxtools.conf + for delphix_engine in dlpx_obj.dlpx_engines: + if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': + engine = dlpx_obj.dlpx_engines[delphix_engine] + print_info('Executing against the default Delphix Engine ' + 'in the dxtools.conf: {}'.format( + dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) + break + if engine is None: + raise DlpxException('\nERROR: No default engine found. Exiting') + # run the job against the engine + threads.append(main_workflow(engine, dlpx_obj)) + + # For each thread in the list... + for each in threads: + # join them back together so that we wait for all threads to complete + # before moving on + each.join() + + +def main(): + # We want to be able to call on these variables anywhere in the script. + global single_thread + global time_start + global debug + + try: + dx_session_obj = GetSession() + logging_est(arguments['--logdir']) + print_debug(arguments) + time_start = time() + config_file_path = arguments['--config'] + + + logging_est(arguments['--logdir']) + print_debug(arguments) + single_thread = False + # Parse the dxtools.conf and put it into a dictionary + dx_session_obj.get_config(config_file_path) + + # This is the function that will handle processing main_workflow for + # all the servers. + run_job(dx_session_obj, config_file_path) + + elapsed_minutes = time_elapsed() + print_info('script took {:.2f} to get this far.'.format( + elapsed_minutes)) + + # Here we handle what we do when the unexpected happens + except SystemExit as e: + # This is what we use to handle our sys.exit(#) + sys.exit(e) + + except DlpxException as e: + # We use this exception handler when an error occurs in a function call. 
+ print_info('\nERROR: Please check the ERROR message below:\n{}'.format( + e.message)) + sys.exit(2) + + except HttpError as e: + # We use this exception handler when our connection to Delphix fails + print_info('\nERROR: Connection failed to the Delphix Engine. Please ' + 'check the ERROR message below:\n{}'.format(e.message)) + sys.exit(2) + + except JobError as e: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print('A job failed in the Delphix Engine:\n{}'.format(e.job)) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(3) + + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + print_debug('You sent a CTRL+C to interrupt the process') + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + except: + # Everything else gets caught here + print '{}\n{}'.format(sys.exc_info()[0], traceback.format_exc()) + elapsed_minutes = time_elapsed() + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) + sys.exit(1) + + +if __name__ == "__main__": + # Grab our arguments from the doc at the top of the script + arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + # Feed our arguments to the main function, and off we go! + main() diff --git a/v1_8_2/lib/DlpxException.py b/v1_8_2/lib/DlpxException.py new file mode 100644 index 0000000..e6db45c --- /dev/null +++ b/v1_8_2/lib/DlpxException.py @@ -0,0 +1,14 @@ +""" +Custom exception class for delphixpy scripts +""" + +from DxLogging import print_exception + +class DlpxException(Exception): + """ + Delphix Exception class. Exit signals are handled by calling method. + """ + + + def __init__(self, message): + Exception.__init__(self, message) diff --git a/v1_8_2/lib/DxLogging.py b/v1_8_2/lib/DxLogging.py new file mode 100644 index 0000000..b2ef466 --- /dev/null +++ b/v1_8_2/lib/DxLogging.py @@ -0,0 +1,63 @@ +""" +Package DxLogging +""" + +import logging + +VERSION = 'v.0.1.005' + +def logging_est(logfile_path, debug=False): + """ + Establish Logging + + logfile_path: path to the logfile. Default: current directory. + debug: Set debug mode on (True) or off (False). Default: False + """ + + logging.basicConfig(filename=logfile_path, + format='%(levelname)s:%(asctime)s:%(message)s', + level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') + + logger = logging.getLogger() + + if debug is True: + logger.setLevel(10) + print_info('Debug Logging is enabled.') + + +def print_debug(print_obj, debug=False): + """ + Call this function with a log message to prefix the message with DEBUG + + print_obj: Object to print to logfile and stdout + debug: Flag to enable debug logging. 
Default: False + :rtype: None + """ + try: + if debug is True: + print 'DEBUG: {}'.format(str(print_obj)) + logging.debug(str(print_obj)) + except: + pass + + +def print_info(print_obj): + """ + Call this function with a log message to prefix the message with INFO + """ + print 'INFO: {}'.format(str(print_obj)) + logging.info(str(print_obj)) + +def print_warning(print_obj): + """ + Call this function with a log message to prefix the message with INFO + """ + print 'WARN: %s' % (str(print_obj)) + logging.warn(str(print_obj)) + +def print_exception(print_obj): + """ + Call this function with a log message to prefix the message with EXCEPTION + """ + print str(print_obj) + logging.exception('EXCEPTION: {}'.format(str(print_obj))) diff --git a/v1_8_2/lib/DxTimeflow.py b/v1_8_2/lib/DxTimeflow.py new file mode 100644 index 0000000..3d58f9c --- /dev/null +++ b/v1_8_2/lib/DxTimeflow.py @@ -0,0 +1,313 @@ +""" +List, create, destroy and refresh Delphix timeflows +""" +# TODO: +# implement debug flag + +import re +import sys + +from delphixpy.exceptions import HttpError, JobError, RequestError +from delphixpy.web import database +from delphixpy.web import timeflow +from delphixpy.web import snapshot +from delphixpy import job_context +from delphixpy.web.timeflow import bookmark +from delphixpy.web.vo import OracleRefreshParameters +from delphixpy.web.vo import OracleTimeflowPoint +from delphixpy.web.vo import RefreshParameters +from delphixpy.web.vo import TimeflowPointLocation +from delphixpy.web.vo import MSSqlTimeflowPoint +from delphixpy.web.vo import TimeflowPointTimestamp +from delphixpy.web.vo import TimeflowPointSemantic + +from DlpxException import DlpxException +from GetReferences import get_obj_reference +from GetReferences import convert_timestamp +from GetReferences import find_obj_by_name +from DxLogging import print_exception + +VERSION = 'v.0.2.003' + +class DxTimeflow(object): + """Shared methods for timeflows """ + + def __init__(self, engine): + self.engine = engine + + + def get_timeflow_reference(self, db_name): + """ + Return current_timeflow for the db_name + + db_name: The database name to retrieve current_timeflow + """ + + db_lst = database.get_all(self.engine) + + for db_obj in db_lst: + if db_obj.name == db_name: + return db_obj.current_timeflow + + raise DlpxException('Timeflow reference not found for {}'.format( + db_name)) + + + def list_timeflows(self): + """ + Retrieve and print all timeflows for a given engine + """ + + all_timeflows = timeflow.get_all(self.engine) + + print 'DB Name, Timeflow Name, Timestamp' + for tfbm_lst in all_timeflows: + + try: + db_name = get_obj_reference(self.engine, database, + tfbm_lst.container) + + print '{}, {}, {}\n'.format(str(db_name), + str(tfbm_lst.name), + str(tfbm_lst.parent_point.timestamp)) + + except AttributeError: + print '{}, {}\n'.format(str(tfbm_lst.name), str(db_name)) + + except TypeError as e: + raise DlpxException('Listing Timeflows encountered an error' + ':\n{}'.format((e))) + + except RequestError as e: + dlpx_err = e.message + raise DlpxException(dlpx_err.action) + + except (JobError, HttpError) as e: + raise DlpxException(e) + + + def create_bookmark(self, bookmark_name, db_name, timestamp=None, + location=None): + """ + Create a timeflow bookmark + + bookmark_name: Bookmark's name + db_name: The database name to re + timestamp: Timestamp for the bookmark. 
+ Required format is (UTC/Zulu): YYYY-MM-DDTHH:MM:SS.000Z + location: Location of the bookmark + """ + + global bookmark_type + tf_ref = self.get_timeflow_reference(db_name) + + if re.search('ORAC', tf_ref, re.IGNORECASE): + bookmark_type = 'OracleTimeflowPoint' + otfp = OracleTimeflowPoint() + elif re.search('MSSql', tf_ref, re.IGNORECASE): + bookmark_type = 'MSSqlTimeflowPoint' + otfp = MSSqlTimeflowPoint() + + otfp.type = bookmark_type + otfp.timeflow = tf_ref + + if timestamp is not None: + otfp.timestamp = timestamp + else: + otfp.location = location + + tf_create_params = TimeflowBookmarkCreateParameters() + tf_create_params.name = bookmark_name + tf_create_params.timeflow_point = otfp + + try: + print 'Bookmark {} successfully created with reference {}'.format( + bookmark.bookmark.create(self.engine, tf_create_params)) + + except RequestError as e: + raise DlpxException(e.message) + + except (JobError, HttpError): + print_exception('Fatal exception caught while creating the' + 'Timeflow Bookmark:\n{}\n'.format( + sys.exc_info()[0])) + + + def get_bookmarks(self, parsable=False): + """ + Print all Timeflow Bookmarks + + parsable (optional): Flag to print output in a parsable format. + """ + + all_bookmarks = bookmark.bookmark.get_all(self.engine) + + if parsable is False: + print('\nBookmark name\tReference\tTimestamp\t' + 'Location\tTimeflow\n') + + elif parsable is True: + print 'Bookmark name,Reference,Timestamp,Location,Timeflow' + + for tfbm_lst in all_bookmarks: + try: + if tfbm_lst.timestamp is None: + converted_timestamp = None + + else: + converted_timestamp = \ + convert_timestamp(self.engine, tfbm_lst.timestamp[:-5]) + + if parsable is False: + print '{} {} {} {} {}'.format(tfbm_lst.name, + tfbm_lst.reference, str(converted_timestamp), + tfbm_lst.location, tfbm_lst.timeflow) + elif parsable is True: + print '{},{},{},{},{}'.format(tfbm_lst.name, + tfbm_lst.reference, str(converted_timestamp), + tfbm_lst.location, tfbm_lst.timeflow) + + except TypeError: + print 'No timestamp found for {}'.format(tfbm_lst.name) + + except RequestError as e: + dlpx_err = e.message + raise DlpxException(dlpx_err.action) + + + def find_snapshot(self, database_ref, timestamp, snap_name=None, + snap_time=None): + """ + Method to find a snapshot by name + + database_obj: database reference for the snapshot lookup + snap_name: name of the snapshot. Default: None + snap_time: time of the snapshot. Default: None + """ + + snapshots = snapshot.get_all(self.engine, database=database_ref) + + matches = [] + for snapshot_obj in snapshots: + if (str(snapshot_obj.name).startswith(timestamp) and + snap_name is not None): + + matches.append(snapshot_obj) + + elif (str(snapshot_obj.latest_change_point.timestamp).startswith(timestamp) + and snap_time is not None): + + matches.append(snapshot_obj) + + if len(matches) == 1: + return matches[0] + + elif len(matches) > 1: + raise DlpxException('{}: The name specified was not specific ' + 'enough. More than one match found.\n'.format( + self.engine.address)) + + elif len(matches) < 1: + raise DlpxException('{}: No matches found for the time ' + 'specified.\n'.format(self.engine.address)) + + + def set_timeflow_point(self, container_obj, timestamp_type, + timestamp='LATEST', timeflow_name=None): + """ + This method returns the reference of the timestamp specified. + container_obj: Delphix object containing the snapshot/timeflow to be + provisioned. + timestamp_type: Type of timestamp - SNAPSHOT or TIME + timestamp: Name of timestamp/snapshot. 
Default: Latest + """ + + if timestamp_type.upper() == "SNAPSHOT": + if timestamp.upper() == "LATEST": + timeflow_point_parameters = TimeflowPointSemantic() + timeflow_point_parameters.container = container_obj.reference + timeflow_point_parameters.location = "LATEST_SNAPSHOT" + + elif timestamp.startswith("@"): + snapshot_obj = self.find_snapshot(container_obj.reference, + timestamp, snap_name=True) + + if snapshot_obj: + timeflow_point_parameters=TimeflowPointLocation() + timeflow_point_parameters.timeflow = snapshot_obj.timeflow + timeflow_point_parameters.location = \ + snapshot_obj.latest_change_point.location + + else: + raise DlpxException('ERROR: Was unable to use the ' + 'specified snapshot {}' + 'for database {}'.format(timestamp, + container_obj.name)) + + elif timestamp: + snapshot_obj = self.find_snapshot(container_obj.reference, + timestamp, snap_time=True) + + if snapshot_obj: + timeflow_point_parameters=TimeflowPointTimestamp() + timeflow_point_parameters.timeflow = snapshot_obj.timeflow + timeflow_point_parameters.timestamp = \ + snapshot_obj.latest_change_point.timestamp + + elif snapshot_obj is None: + print_exception('Was unable to find a suitable time' + ' for {} for database {}'.format( + (timestamp, container_obj.name))) + + elif timestamp_type.upper() == "TIME": + if timestamp.upper() == "LATEST": + timeflow_point_parameters = TimeflowPointSemantic() + timeflow_point_parameters.location = "LATEST_POINT" + + elif timestamp: + timeflow_point_parameters = TimeflowPointTimestamp() + timeflow_point_parameters.type = 'TimeflowPointTimestamp' + timeflow_obj = find_obj_by_name(self.engine, timeflow, + timeflow_name) + + timeflow_point_parameters.timeflow = timeflow_obj.reference + timeflow_point_parameters.timestamp = timestamp + return timeflow_point_parameters + else: + raise DlpxException('{} is not a valid timestamp_type. Exiting' + '\n'.format(timestamp_type)) + + timeflow_point_parameters.container = container_obj.reference + return timeflow_point_parameters + + + def refresh_container(self, parent_bookmark_ref, db_type, child_db_ref): + """ + Refreshes a container + + parent_bookmark_ref: The parent's bookmark reference. + db_type: The database type + child_db_ref: The child database reference + """ + + if db_type == 'Oracle': + tf_params = OracleRefreshParameters() + else: + tf_params = RefreshParameters() + + tf_params.timeflow_point_parameters = {'type': 'TimeflowPointBookmark', + 'bookmark': parent_bookmark_ref} + + try: + with job_context.async(self.engine): + db_ret_val = database.refresh(self.engine, child_db_ref, + tf_params) + return db_ret_val + + except RequestError as e: + dlpx_err = e.message + raise DlpxException(dlpx_err.action) + + except (JobError, HttpError) as e: + print_exception('Exception caught during refresh:\n{}'.format( + sys.exc_info()[0])) diff --git a/v1_8_2/lib/GetReferences.py b/v1_8_2/lib/GetReferences.py new file mode 100644 index 0000000..88d4b34 --- /dev/null +++ b/v1_8_2/lib/GetReferences.py @@ -0,0 +1,288 @@ +""" +Module that provides lookups of references and names of Delphix objects. 
+""" + +import re +from datetime import datetime +from dateutil import tz + +from delphixpy.web.service import time +from delphixpy.exceptions import RequestError +from delphixpy.exceptions import HttpError +from delphixpy.exceptions import JobError +from delphixpy.web import repository +from delphixpy.web import database +from delphixpy.web import source +from delphixpy.web import job +from delphixpy.web import sourceconfig + +from DlpxException import DlpxException +from DxLogging import print_debug +from DxLogging import print_exception + +VERSION = 'v.0.2.0019' + +def convert_timestamp(engine, timestamp): + """ + Convert timezone from Zulu/UTC to the Engine's timezone + engine: A Delphix engine session object. + timestamp: the timstamp in Zulu/UTC to be converted + """ + + default_tz = tz.gettz('UTC') + engine_tz = time.time.get(engine) + + try: + convert_tz = tz.gettz(engine_tz.system_time_zone) + utc = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S') + utc = utc.replace(tzinfo=default_tz) + converted_tz = utc.astimezone(convert_tz) + engine_local_tz = '{} {} {}'.format(str(converted_tz.date()), + str(converted_tz.time()), + str(converted_tz.tzname())) + + return engine_local_tz + except TypeError: + return None + + +def find_all_objects(engine, f_class): + """ + Return all objects from a given class + engine: A Delphix engine session object + f_class: The objects class. I.E. database or timeflow. + :return: List of objects + """ + + return_lst = [] + + try: + return f_class.get_all(engine) + + except (JobError, HttpError) as e: + raise DlpxException('{} Error encountered in {}: {}\n'.format( + engine.address, f_class, e)) + + +def find_obj_specs(engine, obj_lst): + """ + Function to find objects for replication + engine: Delphix Virtualization session object + obj_lst: List of names for replication + :return: List of references for the given object names + """ + rep_lst = [] + for obj in obj_lst: + rep_lst.append(find_obj_by_name(engine, database, obj).reference) + return rep_lst + + +def get_running_job(engine, target_ref): + """ + Function to find a running job from the DB target reference. + :param engine: A Virtualization engine session object + :param target_ref: Reference to the target of the running job + :return: + """ + return job.get_all(engine, target=target_ref, + job_state='RUNNING')[0].reference + + +def find_obj_list(obj_lst, obj_name): + """ + Function to find an object in a list of objects + obj_lst: List containing objects from the get_all() method + obj_name: Name of the object to match + :return: The named object. None is returned if no match is found.` + """ + for obj in obj_lst: + if obj_name == obj.name: + return obj + return None + + +def find_obj_by_name(engine, f_class, obj_name, active_branch=False): + """ + Function to find objects by name and object class, and return object's + reference as a string + engine: A Delphix engine session object + f_class: The objects class. I.E. database or timeflow. + obj_name: The name of the object + active_branch: Default = False. If true, return list containing + the object's reference and active_branch. Otherwise, return + the reference. + """ + + return_list = [] + + try: + all_objs = f_class.get_all(engine) + except AttributeError as e: + raise DlpxException('Could not find reference for object class' + '{}.\n'.format(e)) + for obj in all_objs: + if obj.name == obj_name: + + if active_branch is False: + return(obj) + + #This code is for JS objects only. 
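+            # (in that case a two-item list, [reference, active_branch], is returned
+            # so Jet Stream callers can address a specific branch)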
+ elif active_branch is True: + return_list.append(obj.reference) + return_list.append(obj.active_branch) + return(return_list) + + return obj + + #If the object isn't found, raise an exception. + raise DlpxException('{} was not found on engine {}.\n'.format( + obj_name, engine.address)) + +def find_source_by_dbname(engine, f_class, obj_name, active_branch=False): + """ + Function to find sources by database name and object class, and return object's + reference as a string + engine: A Delphix engine session object + f_class: The objects class. I.E. database or timeflow. + obj_name: The name of the database object in Delphix + active_branch: Default = False. If true, return list containing + the object's reference and active_branch. Otherwise, return + the reference. + """ + + return_list = [] + + try: + all_objs = f_class.get_all(engine) + except AttributeError as e: + raise DlpxException('Could not find reference for object class' + '{}.\n'.format(e)) + for obj in all_objs: + + if obj.name == obj_name: + print_debug('object: {}\n\n'.format(obj)) + print_debug(obj.name) + print_debug(obj.reference) + source_obj = source.get_all(engine,database=obj.reference) + print_debug('source: {}\n\n'.format(source_obj)) + return source_obj[0] + + #If the object isn't found, raise an exception. + raise DlpxException('{} was not found on engine {}.\n'.format( + obj_name, engine.address)) + + +def get_obj_reference(engine, obj_type, obj_name, search_str=None, + container=False): + """ + Return the reference for the provided object name + engine: A Delphix engine object. + results: List containing object name + search_str (optional): string to search within results list + container (optional): search for container instead of name + """ + + ret_lst = [] + + results = obj_type.get_all(engine) + + for result in results: + if container is False: + if result.name == obj_name: + ret_lst.append(result.reference) + + if search_str: + if re.search(search_str, result.reference, re.IGNORECASE): + ret_lst.append(True) + else: + ret_lst.append(False) + + return ret_lst + else: + if result.container == obj_name: + ret_lst.append(result.reference) + + return ret_lst + + raise DlpxException('Reference not found for {}'.format(obj_name)) + + +def find_obj_name(engine, f_class, obj_reference): + """ + Return the obj name from obj_reference + + engine: A Delphix engine object. + f_class: The objects class. I.E. database or timeflow. + obj_reference: The object reference to retrieve the name + """ + try: + obj_name = f_class.get(engine, obj_reference) + return obj_name.name + + except RequestError as e: + raise DlpxException(e) + + except (JobError, HttpError) as e: + raise DlpxException(e.message) + + +def find_dbrepo(engine, install_type, f_environment_ref, f_install_path): + """ + Function to find database repository objects by environment reference and + install path, and return the object's reference as a string + You might use this function to find Oracle and PostGreSQL database repos. + engine: Virtualization Engine Session object + install_type: Type of install - Oracle, ASE, SQL + f_environment_ref: Reference of the environment for the repository + f_install_path: Path to the installation directory. 
+ return: delphixpy.web.vo.SourceRepository object + """ + + print_debug('Searching objects in the %s class for one with the ' + 'environment reference of %s and an install path of %s' % + (install_type, f_environment_ref, f_install_path)) + #import pdb;pdb.set_trace() + all_objs = repository.get_all(engine, environment=f_environment_ref) + for obj in all_objs: + if 'OracleInstall' == install_type: + if (obj.type == install_type and + obj.installation_home == f_install_path): + + print_debug('Found a match %s'.format(obj.reference)) + return obj + + elif 'MSSqlInstance' == install_type: + if (obj.type == install_type and + obj.instance_name == f_install_path): + + print_debug('Found a match {}'.format(obj.reference)) + return obj + + else: + raise DlpxException('No Repo match found for type {}.\n'.format( + install_type)) + +def find_sourceconfig(engine, sourceconfig_name, f_environment_ref): + """ + Function to find database sourceconfig objects by environment reference and + sourceconfig name (db name), and return the object's reference as a string + You might use this function to find Oracle and PostGreSQL database + sourceconfigs. + engine: Virtualization Engine Session object + sourceconfig_name: Name of source config, usually name of db + instnace (ie. orcl) + f_environment_ref: Reference of the environment for the repository + return: delphixpy.web.vo.SourceConfig object + """ + + print_debug('Searching objects in the SourceConfig class for one with the ' + 'environment reference of %s and a name of %s' % + (f_environment_ref, sourceconfig_name)) + all_objs = sourceconfig.get_all(engine, environment=f_environment_ref) + for obj in all_objs: + if obj.name == sourceconfig_name: + print_debug('Found a match %s'.format(obj.reference)) + return obj + else: + raise DlpxException('No sourceconfig match found for type {}.' + '\n'.format(sourceconfig_name)) diff --git a/v1_8_2/lib/GetSession.py b/v1_8_2/lib/GetSession.py new file mode 100644 index 0000000..90fd7c2 --- /dev/null +++ b/v1_8_2/lib/GetSession.py @@ -0,0 +1,169 @@ +#!/usr/bin/env python +# Corey Brune - Oct 2016 +#This class handles the config file and authentication to a VE +#requirements +#pip install docopt delphixpy + +"""This module takes the conf file for VE(s) and returns an authentication + object +""" + +import json +import ssl +from time import sleep + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.exceptions import RequestError +from delphixpy.exceptions import JobError +from delphixpy.exceptions import HttpError +from delphixpy import job_context +from delphixpy.web import job +from delphixpy.web import system + +from lib.DlpxException import DlpxException +from lib.DxLogging import print_debug +from lib.DxLogging import print_info + + +VERSION = 'v.0.2.09' + + +class GetSession(object): + """ + Class to get the configuration and returns an Delphix authentication + object + """ + + def __init__(self): + self.server_session = None + self.dlpx_engines = {} + self.jobs = {} + + + def __getitem__(self, key): + return self.data[key] + + + def get_config(self, config_file_path='./dxtools.conf'): + """ + This method reads in the dxtools.conf file + + config_file_path: path to the configuration file. 
+ Default: ./dxtools.conf + """ + + #config_file_path = config_file_path + #config_file = None + + #First test to see that the file is there and we can open it + try: + with open(config_file_path) as config_file: + + #Now parse the file contents as json and turn them into a + #python dictionary, throw an error if it isn't proper json + config = json.loads(config_file.read()) + + except IOError: + raise DlpxException('\nERROR: Was unable to open {}. Please ' + 'check the path and permissions, and try ' + 'again.\n'.format(config_file_path)) + + except (ValueError, TypeError, AttributeError) as e: + raise DlpxException('\nERROR: Was unable to read {} as json. ' + 'Please check if the file is in a json format' + ' and try again.\n {}'.format(config_file_path, + e)) + + #Create a dictionary of engines (removing the data node from the + # dxtools.json, for easier parsing) + for each in config['data']: + self.dlpx_engines[each['hostname']] = each + + + def serversess(self, f_engine_address, f_engine_username, + f_engine_password, f_engine_namespace='DOMAIN'): + """ + Method to setup the session with the Virtualization Engine + + f_engine_address: The Virtualization Engine's address (IP/DNS Name) + f_engine_username: Username to authenticate + f_engine_password: User's password + f_engine_namespace: Namespace to use for this session. Default: DOMAIN + """ + +# if use_https: +# if hasattr(ssl, '_create_unverified_context'): +# ssl._create_default_https_context = \ +# ssl._create_unverified_context + + try: + if f_engine_password: + self.server_session = DelphixEngine(f_engine_address, + f_engine_username, + f_engine_password, + f_engine_namespace) + elif f_engine_password is None: + self.server_session = DelphixEngine(f_engine_address, + f_engine_username, + None, f_engine_namespace) + + except (HttpError, RequestError, JobError) as e: + raise DlpxException('ERROR: An error occurred while authenticating' + ' to {}:\n {}\n'.format(f_engine_address, e)) + + + def job_mode(self, single_thread=True): + """ + This method tells Delphix how to execute jobs, based on the + single_thread variable + + single_thread: Execute application synchronously (True) or + async (False) + Default: True + """ + + #Synchronously (one at a time) + if single_thread is True: + print_debug("These jobs will be executed synchronously") + return job_context.sync(self.server_session) + + #Or asynchronously + elif single_thread is False: + print_debug("These jobs will be executed asynchronously") + return job_context.async(self.server_session) + + + def job_wait(self): + """ + This job stops all work in the thread/process until jobs are completed. + + No arguments + """ + #Grab all the jos on the server (the last 25, be default) + all_jobs = job.get_all(self.server_session) + + #For each job in the list, check to see if it is running (not ended) + for jobobj in all_jobs: + if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): + print_debug('\nDEBUG: Waiting for %s (currently: %s) to ' + 'finish running against the container.\n' % + (jobobj.reference, jobobj.job_state)) + + #If so, wait + job_context.wait(self.server_session, jobobj.reference) + + def server_wait(self): + """ + This job just waits for the Delphix Engine to be up and for a + succesful connection. 
+ + No arguments + """ + while True: + try: + system.get(self.server_session) + break + except: + pass + print_info("Waiting for Delphix Engine to be ready") + sleep(3) diff --git a/v1_8_2/lib/__init__.py b/v1_8_2/lib/__init__.py new file mode 100644 index 0000000..9eae820 --- /dev/null +++ b/v1_8_2/lib/__init__.py @@ -0,0 +1,5 @@ +import DlpxException +import DxLogging +import DxTimeflow +import GetReferences +import GetSession diff --git a/v1_8_2/list_all_databases.py b/v1_8_2/list_all_databases.py new file mode 100755 index 0000000..901c533 --- /dev/null +++ b/v1_8_2/list_all_databases.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.web import database + + +server_session= DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") + +all_databases = database.get_all(server_session) + +#print all_databases + +print str(len(all_databases)) + " databases in the LandsharkEngine" + +for each in all_databases: + print each.name diff --git a/v1_8_2/simple_snapshot.py b/v1_8_2/simple_snapshot.py new file mode 100755 index 0000000..0172b46 --- /dev/null +++ b/v1_8_2/simple_snapshot.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.web import group, database +group_name = "Dev Copies" +database_name = "Employee DB - Dev" + +server_session = DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") + +all_groups = group.get_all(server_session) + +for each in all_groups: + if group_name == each.name: + group_reference = each.reference + break + +database_objs = database.get_all(server_session, group=group_reference) + +for obj in database_objs: + if database_name == obj.name: + database_reference = obj.reference + break + +database.sync(server_session, database_reference) diff --git a/v1_8_2/snapshot_group.py b/v1_8_2/snapshot_group.py new file mode 100755 index 0000000..1f9b1ba --- /dev/null +++ b/v1_8_2/snapshot_group.py @@ -0,0 +1,21 @@ +#!/usr/bin/env python +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.web import group, database +from delphixpy import job_context +group_name = "Dev Copies" +#database_name = "Employee DB - Dev" + +server_session = DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") + +all_groups = group.get_all(server_session) + +for each in all_groups: + if group_name == each.name: + group_reference = each.reference + break + +database_objs = database.get_all(server_session, group=group_reference) + +with job_context.async(server_session): + for obj in database_objs: + database.sync(server_session, obj.reference) diff --git a/v1_8_2/test_dx_authorization.py b/v1_8_2/test_dx_authorization.py new file mode 100755 index 0000000..a29b4d8 --- /dev/null +++ b/v1_8_2/test_dx_authorization.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python + +""" +Unit tests for DVE authorizations +""" + +import unittest +import sys + +import dx_authorization +from lib.GetSession import GetSession + + +class DxAuthorizationTests(unittest.TestCase): + """ + Creates, activates, lists destroys Delphix Authorizations + + Requirements: VDB named dx_vdb, group named Untitled, and user named jsuser. + Change target_vdb, group and user to reflect values in your environment. 
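+    Note that setUpClass/tearDownClass also open a session using a hard-coded engine
+    address and credentials in serversess(); update those for your environment as well.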
+ """ + + @classmethod + def setUpClass(cls): + super(DxAuthorizationTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' + cls.user = 'jsuser' + cls.target_vdb = 'dx_vdb' + cls.group = 'Untitled' + cls.target_type_db = 'database' + cls.target_type_group = 'group' + cls.role_data = 'Data' + cls.role_read = 'Read' + cls.role_owner = 'OWNER' + + def test_create_authorization_group(self): + dx_authorization.create_authorization(self.server_obj, self.role_data, + self.target_type_group, + self.group, self.user) + self.assertIn('created for {}'.format(self.user), + sys.stdout.getvalue().strip()) + + def test_create_authorization_database(self): + dx_authorization.create_authorization(self.server_obj, self.role_data, + self.target_type_db, + self.target_vdb, self.user) + self.assertIn('created for {}'.format(self.user), + sys.stdout.getvalue().strip()) + + def test_lists_dx_authorizations(self): + dx_authorization.list_authorization(self.server_obj) + self.assertIn('sysadmin', sys.stdout.getvalue().strip()) + + @classmethod + def tearDownClass(cls): + super(DxAuthorizationTests, cls).tearDownClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.user = 'jsuser' + cls.target_vdb = 'dx_vdb' + cls.group = 'Untitled' + cls.target_type_db = 'database' + cls.target_type_group = 'group' + cls.role_data = 'Data' + cls.role_read = 'Read' + cls.role_owner = 'OWNER' + dx_authorization.delete_authorization(cls.server_obj, cls.role_data, + cls.target_type_db, + cls.target_vdb, cls.user) + dx_authorization.delete_authorization(cls.server_obj, cls.role_data, + cls.target_type_group, cls.group, + cls.user) + + +# Run the test case +if __name__ == '__main__': + unittest.main(buffer=True) \ No newline at end of file diff --git a/v1_8_2/test_dx_operation.py b/v1_8_2/test_dx_operation.py new file mode 100755 index 0000000..3935822 --- /dev/null +++ b/v1_8_2/test_dx_operation.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python + +""" +Unit tests for DVE operations +""" + +import unittest +import sys + +import dx_operations +from lib.GetSession import GetSession + + +class DxOperationsTests(unittest.TestCase): + """ + Creates, activates, lists destroys Delphix Authorizations + + Requirements: VDB named dx_vdb. + Change target_vdb to reflect values in your environment. 
+ """ + + @classmethod + def setUpClass(cls): + super(DxOperationsTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' + cls.target_vdb = 'dx_vdb' + + def test_operation_functionality(self): + operations = ['stop', 'start', 'disable', 'enable'] + for op in operations: + dx_operations.dx_obj_operation(self.server_obj, self.target_vdb, op) + self.assertIn('{} was successfully'.format(op), + sys.stdout.getvalue().strip()) + + def test_lists_dx_authorizations(self): + dx_operations.list_databases(self.server_obj) + self.assertIn(self.target_vdb, sys.stdout.getvalue().strip()) + +# Run the test case +if __name__ == '__main__': + unittest.main(module=__name__, buffer=True) \ No newline at end of file diff --git a/v1_8_2/test_js_bookmarks.py b/v1_8_2/test_js_bookmarks.py new file mode 100755 index 0000000..a126a80 --- /dev/null +++ b/v1_8_2/test_js_bookmarks.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python + +""" +Unit tests for Jet Stream delphixpy +""" + +import unittest +import sys + +import js_bookmark +from lib.GetSession import GetSession + +VERSION = '0.0.0.1' + +class JetStreamBookmarkTests(unittest.TestCase): + """ + Creates, lists, shares/unshares JS Bookmarks. + + Requirements: data_layout named jstemplate3 exists on the engine. + Change data_layout to reflect values in your environment. + """ + + @classmethod + def setUpClass(cls): + super(JetStreamBookmarkTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' + cls.data_layout = 'jscontainer' + cls.branch_name = 'default' + cls.bookmark_name = 'js_test_bookmark' + js_bookmark.create_bookmark(cls.server_obj, cls.bookmark_name, + cls.data_layout, cls.branch_name) + + def test_unshares_js_bookmark(self): + js_bookmark.unshare_bookmark(self.server_obj, self.bookmark_name) + self.assertIn('{} was unshared'.format(self.bookmark_name), + sys.stdout.getvalue().strip()) + + def test_shares_js_bookmark(self): + js_bookmark.share_bookmark(self.server_obj, self.bookmark_name) + self.assertIn('{} was shared'.format(self.bookmark_name), + sys.stdout.getvalue().strip()) + + def test_lists_js_bookmarks(self): + js_bookmark.list_bookmarks(self.server_obj) + self.assertIn('Name, Reference, Branch'.format(self.bookmark_name), + sys.stdout.getvalue().strip()) + + @classmethod + def tearDownClass(cls): + super(JetStreamBookmarkTests, cls).tearDownClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.bookmark_name = 'js_test_bookmark' + js_bookmark.delete_bookmark(cls.server_obj, cls.bookmark_name) + + +# Run the test case +if __name__ == '__main__': + unittest.main(module=__name__, buffer=True) \ No newline at end of file diff --git a/v1_8_2/test_js_branches.py b/v1_8_2/test_js_branches.py new file mode 100755 index 0000000..e306b27 --- /dev/null +++ b/v1_8_2/test_js_branches.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +""" +Unit tests for Jet Stream delphixpy +""" + +import unittest +import sys + +import js_branch +import js_template +import js_container +from lib.GetSession import GetSession + + +class JetStreamBranchTests(unittest.TestCase): + """ + Creates, activates, lists destroys JS Branches + + Requirements: Parent VDB named jst3, and child VDB named jst3_cld. 
+ Change template_db and database_name to reflect values in your environment. + """ + + @classmethod + def setUpClass(cls): + super(JetStreamBranchTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' + cls.container_name = 'js_test_container0001' + cls.branch_name = 'js_test_branch0001' + cls.template_name = 'js_test_template0001' + cls.template_db = 'jst3' + cls.database_name = 'jst3_cld' + js_template.create_template(cls.server_obj, cls.template_name, + cls.template_db) + js_container.create_container(cls.server_obj, cls.template_name, + cls.container_name, cls.database_name) + js_branch.create_branch(cls.server_obj, cls.branch_name, + cls.template_name, cls.container_name) + + def test_activate_js_branch(self): + original_branch = 'default' + js_branch.activate_branch(self.server_obj, original_branch) + self.assertIn(original_branch, sys.stdout.getvalue().strip()) + + def test_lists_js_branches(self): + js_branch.list_branches(self.server_obj) + self.assertIn('Branch Name, Data Layout'.format(self.branch_name), + sys.stdout.getvalue().strip()) + + @classmethod + def tearDownClass(cls): + super(JetStreamBranchTests, cls).tearDownClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.branch_name = 'js_test_branch0001' + cls.container_name = 'js_test_container0001' + cls.template_name = 'js_test_template0001' + js_branch.delete_branch(cls.server_obj, cls.branch_name) + js_container.delete_container(cls.server_obj, cls.container_name, True) + js_template.delete_template(cls.server_obj, cls.template_name) + + +# Run the test case +if __name__ == '__main__': + unittest.main(buffer=True) diff --git a/v1_8_2/test_js_containers.py b/v1_8_2/test_js_containers.py new file mode 100755 index 0000000..eb24189 --- /dev/null +++ b/v1_8_2/test_js_containers.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python + +""" +Unit tests for Jet Stream delphixpy +""" + +import unittest +import sys + +import js_container +import js_template +import js_bookmark +from lib.GetSession import GetSession + + +class JetStreamContainerTests(unittest.TestCase): + """ + Creates, lists, adds/removes users to JS Containers. + + Requirements: Parent VDB named jst3, child VDB named jst3_cld and a + user named jsuser. + Change template_db, database_name and owner_name to reflect values in + your environment. 
+ """ + + @classmethod + def setUpClass(cls): + super(JetStreamContainerTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' + cls.container_name = 'js_test_container0001' + cls.database_name = 'jst3_cld' + cls.template_db = 'jst3' + cls.bookmark_name = 'js_test_bookmark0001' + cls.template_name = 'js_test_template0001' + + js_template.create_template(cls.server_obj, cls.template_name, + cls.template_db) + js_container.create_container(cls.server_obj, cls.template_name, + cls.container_name, cls.database_name) + js_bookmark.create_bookmark(cls.server_obj, cls.bookmark_name, + cls.template_name) + + def test_adds_removes_js_user(self): + owner_name = 'jsuser' + js_container.add_owner(self.server_obj, owner_name, + self.container_name) + self.assertIn(owner_name, sys.stdout.getvalue().strip()) + + js_container.remove_owner(self.server_obj, owner_name, + self.container_name) + self.assertIn(owner_name, sys.stdout.getvalue().strip()) + + def test_lists_js_containers(self): + js_container.list_containers(self.server_obj) + self.assertIn(self.container_name, sys.stdout.getvalue().strip()) + + def test_lists_hierarchy_js_containers(self): + js_container.list_hierarchy(self.server_obj, self.container_name) + self.assertIn(self.database_name, sys.stdout.getvalue().strip()) + + def test_refreshes_js_containers(self): + js_container.refresh_container(self.server_obj, self.container_name) + self.assertIn(self.container_name, sys.stdout.getvalue().strip()) + + def test_restore_js_container_to_bookmark(self): + js_container.restore_container(self.server_obj, self.container_name, + self.bookmark_name) + self.assertIn(self.container_name, sys.stdout.getvalue().strip()) + + @classmethod + def tearDownClass(cls): + super(JetStreamContainerTests, cls).tearDownClass() + cls.server_obj = GetSession() + cls.container_name = 'js_test_container0001' + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.container_name = 'js_test_container0001' + cls.template_name = 'js_test_template0001' + js_container.delete_container(cls.server_obj, cls.container_name, True) + js_template.delete_template(cls.server_obj, cls.template_name) + + + +# Run the test case +if __name__ == '__main__': + unittest.main(buffer=True) \ No newline at end of file diff --git a/v1_8_2/test_js_templates.py b/v1_8_2/test_js_templates.py new file mode 100755 index 0000000..b38094a --- /dev/null +++ b/v1_8_2/test_js_templates.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python + +""" +Unit tests for Jet Stream delphixpy +""" + +import unittest +import sys + +import js_template +from lib.GetSession import GetSession + +VERSION = '0.0.0.015' + +class JetStreamTemplateTests(unittest.TestCase): + """ + Creates, lists, deletes JS Templates + + Requirements: Parent VDB named jst3. + Change database_name to reflect values in your environment. 
+ """ + + @classmethod + def setUpClass(cls): + super(JetStreamTemplateTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' + cls.database_name = 'jst3' + cls.template_name = 'js_test_template0001' + js_template.create_template(cls.server_obj, cls.template_name, + cls.database_name) + + def test_lists_js_templates(self): + js_template.list_templates(self.server_obj) + self.assertIn(self.template_name, sys.stdout.getvalue().strip()) + + @classmethod + def tearDownClass(cls): + super(JetStreamTemplateTests, cls).tearDownClass() + cls.server_obj = GetSession() + cls.server_obj.serversess('172.16.169.146', 'delphix_admin', + 'delphix', 'DOMAIN') + cls.template_name = 'js_test_template0001' + js_template.delete_template(cls.server_obj, cls.template_name) + + +# Run the test case +if __name__ == '__main__': + unittest.main(module=__name__, buffer=True) diff --git a/v1_8_2/trigger_replication.py b/v1_8_2/trigger_replication.py new file mode 100755 index 0000000..23898b1 --- /dev/null +++ b/v1_8_2/trigger_replication.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +#Adam Bowen Sept 2016 +VERSION="v.0.0.002" +#just a quick and dirty example of executing a replication profile + +from delphixpy.delphix_engine import DelphixEngine +from delphixpy.web import replication +from delphixpy.web.vo import ReplicationSpec + +engine_address = "192.168.218.177" +engine_username = "delphix_admin" +engine_password = "landshark" + +replication_profile_name = "Example Replication Profile" + +def serversess(f_engine_address, f_engine_username, f_engine_password): + """ + Function to setup the session with the Delphix Engine + """ + server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") + return server_session + +def find_obj_by_name(server, f_class, obj_name): + """ + Function to find objects by name and object class, and return object's reference as a string + You might use this function to find objects like groups. + """ + print "Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"" + obj_ref = '' + + all_objs = f_class.get_all(server) + for obj in all_objs: + if obj.name == obj_name: + print "Found a match " + str(obj.reference) + return obj + + +server = serversess(engine_address, engine_username, engine_password) + +replication_list=replication.spec.get_all(server) + +print "##### REPLICATION LIST #######" +for obj in replication_list: + print obj.name +print "##### END REPLICATION LIST #######" + +replication_spec = find_obj_by_name(server, replication.spec, replication_profile_name) + +print "##### REPLICATION PROFILE: " + replication_profile_name +" #######" +print replication_spec.reference + +print "Executing " + replication_profile_name + +replication.spec.execute(server, replication_spec.reference) + +print replication_profile_name + " executed." 
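Editor's note on this patch: the helper modules it introduces (GetSession, GetReferences, DxTimeflow) are intended to be combined, and the short sketch below shows one plausible wiring based only on the code above. It is illustrative, not part of the patch: the engine key 'myengine', the credentials expected in dxtools.conf, and the database name 'Employee Oracle 11G DB' are placeholders rather than values the patch defines.

from delphixpy.web import database

from lib.GetSession import GetSession
from lib.GetReferences import find_obj_by_name
from lib.DxTimeflow import DxTimeflow

# Parse ./dxtools.conf and pick one engine entry; 'myengine' stands in for a
# hostname key that would exist in your own config.
dx_session_obj = GetSession()
dx_session_obj.get_config('./dxtools.conf')
engine = dx_session_obj.dlpx_engines['myengine']

# Authenticate to that engine (namespace defaults to DOMAIN).
dx_session_obj.serversess(engine['ip_address'], engine['username'],
                          engine['password'])

# Resolve a database object by name and walk its timeflows; the database name
# here is assumed to exist on the engine.
db_obj = find_obj_by_name(dx_session_obj.server_session, database,
                          'Employee Oracle 11G DB')
dx_tf = DxTimeflow(dx_session_obj.server_session)
print(dx_tf.get_timeflow_reference(db_obj.name))
dx_tf.list_timeflows()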
From 2cfde98dd753f382bcc950fb9e4d246688dd262b Mon Sep 17 00:00:00 2001 From: Corey Brune Date: Sun, 17 Sep 2017 15:01:18 -0500 Subject: [PATCH 3/6] Added requirements.txt --- requirements.txt => v1_8_0/requirements.txt | 0 v1_8_2/requirements.txt | 4 ++++ 2 files changed, 4 insertions(+) rename requirements.txt => v1_8_0/requirements.txt (100%) create mode 100644 v1_8_2/requirements.txt diff --git a/requirements.txt b/v1_8_0/requirements.txt similarity index 100% rename from requirements.txt rename to v1_8_0/requirements.txt diff --git a/v1_8_2/requirements.txt b/v1_8_2/requirements.txt new file mode 100644 index 0000000..deed047 --- /dev/null +++ b/v1_8_2/requirements.txt @@ -0,0 +1,4 @@ +setuptools +pip +docopt +delphixpy \ No newline at end of file From 0b3abcd0bf9d45f42e8c0128313695c23c3a3bba Mon Sep 17 00:00:00 2001 From: Corey Brune Date: Thu, 28 Sep 2017 12:33:27 -0500 Subject: [PATCH 4/6] Move v1_8_2 to latest --- {v1_8_2 => latest}/add_windows_env.py | 0 {v1_8_2 => latest}/delphix_admin_setup.py | 0 {v1_8_2 => latest}/delphix_snapshot_group_will_plugin.py | 0 {v1_8_2 => latest}/delphix_will_plugin.py | 0 {v1_8_2 => latest}/dx_authorization.py | 0 {v1_8_2 => latest}/dx_database.py | 0 {v1_8_2 => latest}/dx_delete_vdb.py | 0 {v1_8_2 => latest}/dx_environment.py | 0 {v1_8_2 => latest}/dx_groups.py | 0 {v1_8_2 => latest}/dx_jetstream_container.py | 0 {v1_8_2 => latest}/dx_jobs.py | 0 {v1_8_2 => latest}/dx_operations.py | 0 {v1_8_2 => latest}/dx_operations_vdb.py | 0 {v1_8_2 => latest}/dx_operations_vdb_orig.py | 0 {v1_8_2 => latest}/dx_provision_dsource.py | 0 {v1_8_2 => latest}/dx_provision_vdb.py | 0 {v1_8_2 => latest}/dx_refresh_db.py | 0 {v1_8_2 => latest}/dx_replication.py | 0 {v1_8_2 => latest}/dx_rewind_vdb.py | 0 {v1_8_2 => latest}/dx_skel.py | 0 {v1_8_2 => latest}/dx_snapshot_db.py | 0 {v1_8_2 => latest}/dx_update_env.py | 0 {v1_8_2 => latest}/dx_users.py | 0 {v1_8_2 => latest}/dxtools.conf | 0 {v1_8_2 => latest}/engine_network_assignment.py | 0 {v1_8_2 => latest}/engine_setup.py | 0 {v1_8_2 => latest}/find_missing_archivelogs.py | 0 {v1_8_2 => latest}/get_engine_pub_key.py | 0 {v1_8_2 => latest}/js_bookmark.py | 0 {v1_8_2 => latest}/js_branch.py | 0 {v1_8_2 => latest}/js_container.py | 0 {v1_8_2 => latest}/js_template.py | 0 {v1_8_2 => latest}/lib/DlpxException.py | 0 {v1_8_2 => latest}/lib/DxLogging.py | 0 {v1_8_2 => latest}/lib/DxTimeflow.py | 0 {v1_8_2 => latest}/lib/GetReferences.py | 0 {v1_8_2 => latest}/lib/GetSession.py | 0 {v1_8_2 => latest}/lib/__init__.py | 0 {v1_8_2 => latest}/list_all_databases.py | 0 {v1_8_2 => latest}/requirements.txt | 0 {v1_8_2 => latest}/simple_snapshot.py | 0 {v1_8_2 => latest}/snapshot_group.py | 0 {v1_8_2 => latest}/test_dx_authorization.py | 0 {v1_8_2 => latest}/test_dx_operation.py | 0 {v1_8_2 => latest}/test_js_bookmarks.py | 0 {v1_8_2 => latest}/test_js_branches.py | 0 {v1_8_2 => latest}/test_js_containers.py | 0 {v1_8_2 => latest}/test_js_templates.py | 0 {v1_8_2 => latest}/trigger_replication.py | 0 49 files changed, 0 insertions(+), 0 deletions(-) rename {v1_8_2 => latest}/add_windows_env.py (100%) rename {v1_8_2 => latest}/delphix_admin_setup.py (100%) rename {v1_8_2 => latest}/delphix_snapshot_group_will_plugin.py (100%) rename {v1_8_2 => latest}/delphix_will_plugin.py (100%) rename {v1_8_2 => latest}/dx_authorization.py (100%) rename {v1_8_2 => latest}/dx_database.py (100%) rename {v1_8_2 => latest}/dx_delete_vdb.py (100%) rename {v1_8_2 => latest}/dx_environment.py (100%) rename {v1_8_2 => latest}/dx_groups.py (100%) rename 
{v1_8_2 => latest}/dx_jetstream_container.py (100%) rename {v1_8_2 => latest}/dx_jobs.py (100%) rename {v1_8_2 => latest}/dx_operations.py (100%) rename {v1_8_2 => latest}/dx_operations_vdb.py (100%) rename {v1_8_2 => latest}/dx_operations_vdb_orig.py (100%) rename {v1_8_2 => latest}/dx_provision_dsource.py (100%) rename {v1_8_2 => latest}/dx_provision_vdb.py (100%) rename {v1_8_2 => latest}/dx_refresh_db.py (100%) rename {v1_8_2 => latest}/dx_replication.py (100%) rename {v1_8_2 => latest}/dx_rewind_vdb.py (100%) rename {v1_8_2 => latest}/dx_skel.py (100%) rename {v1_8_2 => latest}/dx_snapshot_db.py (100%) rename {v1_8_2 => latest}/dx_update_env.py (100%) rename {v1_8_2 => latest}/dx_users.py (100%) rename {v1_8_2 => latest}/dxtools.conf (100%) rename {v1_8_2 => latest}/engine_network_assignment.py (100%) rename {v1_8_2 => latest}/engine_setup.py (100%) rename {v1_8_2 => latest}/find_missing_archivelogs.py (100%) rename {v1_8_2 => latest}/get_engine_pub_key.py (100%) rename {v1_8_2 => latest}/js_bookmark.py (100%) rename {v1_8_2 => latest}/js_branch.py (100%) rename {v1_8_2 => latest}/js_container.py (100%) rename {v1_8_2 => latest}/js_template.py (100%) rename {v1_8_2 => latest}/lib/DlpxException.py (100%) rename {v1_8_2 => latest}/lib/DxLogging.py (100%) rename {v1_8_2 => latest}/lib/DxTimeflow.py (100%) rename {v1_8_2 => latest}/lib/GetReferences.py (100%) rename {v1_8_2 => latest}/lib/GetSession.py (100%) rename {v1_8_2 => latest}/lib/__init__.py (100%) rename {v1_8_2 => latest}/list_all_databases.py (100%) rename {v1_8_2 => latest}/requirements.txt (100%) rename {v1_8_2 => latest}/simple_snapshot.py (100%) rename {v1_8_2 => latest}/snapshot_group.py (100%) rename {v1_8_2 => latest}/test_dx_authorization.py (100%) rename {v1_8_2 => latest}/test_dx_operation.py (100%) rename {v1_8_2 => latest}/test_js_bookmarks.py (100%) rename {v1_8_2 => latest}/test_js_branches.py (100%) rename {v1_8_2 => latest}/test_js_containers.py (100%) rename {v1_8_2 => latest}/test_js_templates.py (100%) rename {v1_8_2 => latest}/trigger_replication.py (100%) diff --git a/v1_8_2/add_windows_env.py b/latest/add_windows_env.py similarity index 100% rename from v1_8_2/add_windows_env.py rename to latest/add_windows_env.py diff --git a/v1_8_2/delphix_admin_setup.py b/latest/delphix_admin_setup.py similarity index 100% rename from v1_8_2/delphix_admin_setup.py rename to latest/delphix_admin_setup.py diff --git a/v1_8_2/delphix_snapshot_group_will_plugin.py b/latest/delphix_snapshot_group_will_plugin.py similarity index 100% rename from v1_8_2/delphix_snapshot_group_will_plugin.py rename to latest/delphix_snapshot_group_will_plugin.py diff --git a/v1_8_2/delphix_will_plugin.py b/latest/delphix_will_plugin.py similarity index 100% rename from v1_8_2/delphix_will_plugin.py rename to latest/delphix_will_plugin.py diff --git a/v1_8_2/dx_authorization.py b/latest/dx_authorization.py similarity index 100% rename from v1_8_2/dx_authorization.py rename to latest/dx_authorization.py diff --git a/v1_8_2/dx_database.py b/latest/dx_database.py similarity index 100% rename from v1_8_2/dx_database.py rename to latest/dx_database.py diff --git a/v1_8_2/dx_delete_vdb.py b/latest/dx_delete_vdb.py similarity index 100% rename from v1_8_2/dx_delete_vdb.py rename to latest/dx_delete_vdb.py diff --git a/v1_8_2/dx_environment.py b/latest/dx_environment.py similarity index 100% rename from v1_8_2/dx_environment.py rename to latest/dx_environment.py diff --git a/v1_8_2/dx_groups.py b/latest/dx_groups.py similarity index 100% rename from 
v1_8_2/dx_groups.py rename to latest/dx_groups.py diff --git a/v1_8_2/dx_jetstream_container.py b/latest/dx_jetstream_container.py similarity index 100% rename from v1_8_2/dx_jetstream_container.py rename to latest/dx_jetstream_container.py diff --git a/v1_8_2/dx_jobs.py b/latest/dx_jobs.py similarity index 100% rename from v1_8_2/dx_jobs.py rename to latest/dx_jobs.py diff --git a/v1_8_2/dx_operations.py b/latest/dx_operations.py similarity index 100% rename from v1_8_2/dx_operations.py rename to latest/dx_operations.py diff --git a/v1_8_2/dx_operations_vdb.py b/latest/dx_operations_vdb.py similarity index 100% rename from v1_8_2/dx_operations_vdb.py rename to latest/dx_operations_vdb.py diff --git a/v1_8_2/dx_operations_vdb_orig.py b/latest/dx_operations_vdb_orig.py similarity index 100% rename from v1_8_2/dx_operations_vdb_orig.py rename to latest/dx_operations_vdb_orig.py diff --git a/v1_8_2/dx_provision_dsource.py b/latest/dx_provision_dsource.py similarity index 100% rename from v1_8_2/dx_provision_dsource.py rename to latest/dx_provision_dsource.py diff --git a/v1_8_2/dx_provision_vdb.py b/latest/dx_provision_vdb.py similarity index 100% rename from v1_8_2/dx_provision_vdb.py rename to latest/dx_provision_vdb.py diff --git a/v1_8_2/dx_refresh_db.py b/latest/dx_refresh_db.py similarity index 100% rename from v1_8_2/dx_refresh_db.py rename to latest/dx_refresh_db.py diff --git a/v1_8_2/dx_replication.py b/latest/dx_replication.py similarity index 100% rename from v1_8_2/dx_replication.py rename to latest/dx_replication.py diff --git a/v1_8_2/dx_rewind_vdb.py b/latest/dx_rewind_vdb.py similarity index 100% rename from v1_8_2/dx_rewind_vdb.py rename to latest/dx_rewind_vdb.py diff --git a/v1_8_2/dx_skel.py b/latest/dx_skel.py similarity index 100% rename from v1_8_2/dx_skel.py rename to latest/dx_skel.py diff --git a/v1_8_2/dx_snapshot_db.py b/latest/dx_snapshot_db.py similarity index 100% rename from v1_8_2/dx_snapshot_db.py rename to latest/dx_snapshot_db.py diff --git a/v1_8_2/dx_update_env.py b/latest/dx_update_env.py similarity index 100% rename from v1_8_2/dx_update_env.py rename to latest/dx_update_env.py diff --git a/v1_8_2/dx_users.py b/latest/dx_users.py similarity index 100% rename from v1_8_2/dx_users.py rename to latest/dx_users.py diff --git a/v1_8_2/dxtools.conf b/latest/dxtools.conf similarity index 100% rename from v1_8_2/dxtools.conf rename to latest/dxtools.conf diff --git a/v1_8_2/engine_network_assignment.py b/latest/engine_network_assignment.py similarity index 100% rename from v1_8_2/engine_network_assignment.py rename to latest/engine_network_assignment.py diff --git a/v1_8_2/engine_setup.py b/latest/engine_setup.py similarity index 100% rename from v1_8_2/engine_setup.py rename to latest/engine_setup.py diff --git a/v1_8_2/find_missing_archivelogs.py b/latest/find_missing_archivelogs.py similarity index 100% rename from v1_8_2/find_missing_archivelogs.py rename to latest/find_missing_archivelogs.py diff --git a/v1_8_2/get_engine_pub_key.py b/latest/get_engine_pub_key.py similarity index 100% rename from v1_8_2/get_engine_pub_key.py rename to latest/get_engine_pub_key.py diff --git a/v1_8_2/js_bookmark.py b/latest/js_bookmark.py similarity index 100% rename from v1_8_2/js_bookmark.py rename to latest/js_bookmark.py diff --git a/v1_8_2/js_branch.py b/latest/js_branch.py similarity index 100% rename from v1_8_2/js_branch.py rename to latest/js_branch.py diff --git a/v1_8_2/js_container.py b/latest/js_container.py similarity index 100% rename from 
v1_8_2/js_container.py rename to latest/js_container.py diff --git a/v1_8_2/js_template.py b/latest/js_template.py similarity index 100% rename from v1_8_2/js_template.py rename to latest/js_template.py diff --git a/v1_8_2/lib/DlpxException.py b/latest/lib/DlpxException.py similarity index 100% rename from v1_8_2/lib/DlpxException.py rename to latest/lib/DlpxException.py diff --git a/v1_8_2/lib/DxLogging.py b/latest/lib/DxLogging.py similarity index 100% rename from v1_8_2/lib/DxLogging.py rename to latest/lib/DxLogging.py diff --git a/v1_8_2/lib/DxTimeflow.py b/latest/lib/DxTimeflow.py similarity index 100% rename from v1_8_2/lib/DxTimeflow.py rename to latest/lib/DxTimeflow.py diff --git a/v1_8_2/lib/GetReferences.py b/latest/lib/GetReferences.py similarity index 100% rename from v1_8_2/lib/GetReferences.py rename to latest/lib/GetReferences.py diff --git a/v1_8_2/lib/GetSession.py b/latest/lib/GetSession.py similarity index 100% rename from v1_8_2/lib/GetSession.py rename to latest/lib/GetSession.py diff --git a/v1_8_2/lib/__init__.py b/latest/lib/__init__.py similarity index 100% rename from v1_8_2/lib/__init__.py rename to latest/lib/__init__.py diff --git a/v1_8_2/list_all_databases.py b/latest/list_all_databases.py similarity index 100% rename from v1_8_2/list_all_databases.py rename to latest/list_all_databases.py diff --git a/v1_8_2/requirements.txt b/latest/requirements.txt similarity index 100% rename from v1_8_2/requirements.txt rename to latest/requirements.txt diff --git a/v1_8_2/simple_snapshot.py b/latest/simple_snapshot.py similarity index 100% rename from v1_8_2/simple_snapshot.py rename to latest/simple_snapshot.py diff --git a/v1_8_2/snapshot_group.py b/latest/snapshot_group.py similarity index 100% rename from v1_8_2/snapshot_group.py rename to latest/snapshot_group.py diff --git a/v1_8_2/test_dx_authorization.py b/latest/test_dx_authorization.py similarity index 100% rename from v1_8_2/test_dx_authorization.py rename to latest/test_dx_authorization.py diff --git a/v1_8_2/test_dx_operation.py b/latest/test_dx_operation.py similarity index 100% rename from v1_8_2/test_dx_operation.py rename to latest/test_dx_operation.py diff --git a/v1_8_2/test_js_bookmarks.py b/latest/test_js_bookmarks.py similarity index 100% rename from v1_8_2/test_js_bookmarks.py rename to latest/test_js_bookmarks.py diff --git a/v1_8_2/test_js_branches.py b/latest/test_js_branches.py similarity index 100% rename from v1_8_2/test_js_branches.py rename to latest/test_js_branches.py diff --git a/v1_8_2/test_js_containers.py b/latest/test_js_containers.py similarity index 100% rename from v1_8_2/test_js_containers.py rename to latest/test_js_containers.py diff --git a/v1_8_2/test_js_templates.py b/latest/test_js_templates.py similarity index 100% rename from v1_8_2/test_js_templates.py rename to latest/test_js_templates.py diff --git a/v1_8_2/trigger_replication.py b/latest/trigger_replication.py similarity index 100% rename from v1_8_2/trigger_replication.py rename to latest/trigger_replication.py From 0e0f4ec9ff8d05f54d03b20703681b043d268719 Mon Sep 17 00:00:00 2001 From: Corey Brune Date: Tue, 26 Dec 2017 09:29:44 -0600 Subject: [PATCH 5/6] Added provisioning VDBs to Oracle RAC cluster --- v1_8_0/dx_provision_vdb.py | 483 ++++++++++++++++++------------------ v1_8_0/lib/GetReferences.py | 4 +- v1_8_0/requirements.txt | 3 +- 3 files changed, 244 insertions(+), 246 deletions(-) diff --git a/v1_8_0/dx_provision_vdb.py b/v1_8_0/dx_provision_vdb.py index dcdfe71..377def4 100755 --- 
a/v1_8_0/dx_provision_vdb.py +++ b/v1_8_0/dx_provision_vdb.py @@ -34,12 +34,19 @@ Provision VDB from a defined source on the defined target environment. Examples: - dx_provision_vdb.py --engine landsharkengine --source_grp Sources --source "ASE pubs3 DB" --db vase --target testASE --target_grp Analytics --environment LINUXTARGET --type ase --envinst "LINUXTARGET" +ASE + dx_provision_vdb.py --engine landsharkengine --source "ASE pubs3 DB" --db vase --target testASE --target_grp Analytics --environment LINUXTARGET --type ase --envinst "LINUXTARGET" - dx_provision_vdb.py --source_grp Sources --source "Employee Oracle 11G DB" --instname autod --uniqname autoprod --db autoprod --target autoprod --target_grp Analytics --environment LINUXTARGET --type oracle --envinst "/u01/app/oracle/product/11.2.0/dbhome_1" +Oracle Single Instance + dx_provision_vdb.py --source "Employee Oracle 11G DB" --instname autod --uniqname autoprod --db autoprod --target autoprod --target_grp Analytics --environment LINUXTARGET --type oracle --envinst "/u01/app/oracle/product/11.2.0/dbhome_1" - dx_provision_vdb.py --source_grp Sources --source "AdventureWorksLT2008R2" --db vAW --target testAW --target_grp Analytics --environment WINDOWSTARGET --type mssql --envinst MSSQLSERVER --all +Oracle RAC + dx_provision_vdb.py --source labtest --instname autod --uniqname autod --db autod --target autod --target_grp VDBs --environment 11gRAC_Target --type oraclecluster --envinst "/u01/app/oracle/product/11.2/db_1" +MS SQL + dx_provision_vdb.py --source "AdventureWorksLT2008R2" --db vAW --target testAW --target_grp Analytics --environment WINDOWSTARGET --type mssql --envinst MSSQLSERVER --all + +vFiles VDB dx_provision_vdb.py --source UF_Source --target appDataVDB --target_grp Untitled --environment LinuxTarget --type vfiles --vfiles_path /mnt/provision/appDataVDB --prerollback "/u01/app/oracle/product/scripts/PreRollback.sh" --postrollback "/u01/app/oracle/product/scripts/PostRollback.sh" --vdb_restart true Options: @@ -54,7 +61,7 @@ --no_truncate_log Don't truncate log on checkpoint (ASE only) --environment The name of the Target environment in Delphix --type The type of VDB this is. - oracle | mssql | ase | vfiles + oracle | oraclecluster | mssql | ase | vfiles --prerefresh Pre-Hook commands --postrefresh Post-Hook commands --prerollback Post-Hook commands @@ -98,7 +105,7 @@ -v --version Show version. 
""" -VERSION = 'v.0.2.305' +VERSION = 'v.0.2.401' import signal import sys @@ -124,8 +131,10 @@ from delphixpy.v1_8_0.web.vo import VirtualSourceOperations from delphixpy.v1_8_0.web.vo import OracleDatabaseContainer from delphixpy.v1_8_0.web.vo import OracleInstance +from delphixpy.v1_8_0.web.vo import OracleRACInstance from delphixpy.v1_8_0.web.vo import OracleProvisionParameters from delphixpy.v1_8_0.web.vo import OracleSIConfig +from delphixpy.v1_8_0.web.vo import OracleRACConfig from delphixpy.v1_8_0.web.vo import OracleVirtualSource from delphixpy.v1_8_0.web.vo import TimeflowPointLocation from delphixpy.v1_8_0.web.vo import TimeflowPointSemantic @@ -142,6 +151,7 @@ from delphixpy.v1_8_0.web.vo import AppDataVirtualSource from delphixpy.v1_8_0.web.vo import AppDataProvisionParameters from delphixpy.v1_8_0.web.vo import AppDataDirectSourceConfig +from delphixpy.v1_8_0.web.environment.oracle import clusternode from lib.DxTimeflow import DxTimeflow from lib.DlpxException import DlpxException @@ -151,13 +161,21 @@ from lib.DxLogging import logging_est from lib.DxLogging import print_info from lib.DxLogging import print_debug +from lib.DxLogging import print_exception def create_ase_vdb(engine, server, jobs, vdb_group, vdb_name, environment_obj, container_obj): - ''' + """ Create a Sybase ASE VDB - ''' + :param engine: dictionary of engines + :param server: Virtualization Engine session object + :param jobs: list of running jobs + :param vdb_group: VDB provisioning group + :param vdb_name: Name of VDB + :param environment_obj: Environment object + :param container_obj: Source DB Container object + """ vdb_obj = find_database_by_name_and_group_name(engine, server, vdb_group.name, vdb_name) if vdb_obj == None: @@ -175,14 +193,14 @@ def create_ase_vdb(engine, server, jobs, vdb_group, vdb_name, environment_obj, vdb_params.source_config.instance = ASEInstanceConfig() vdb_params.source_config.instance.host = environment_obj.host - vdb_repo = find_dbrepo_by_environment_ref_and_name(engine, server, - "ASEInstance", + vdb_repo = find_dbrepo_by_environment_ref_and_name(engine, server, + "ASEInstance", environment_obj.reference, arguments['--envinst']) vdb_params.source_config.repository = vdb_repo.reference - vdb_params.timeflow_point_parameters = set_timeflow_point(engine, - server, + vdb_params.timeflow_point_parameters = set_timeflow_point(engine, + server, container_obj) vdb_params.timeflow_point_parameters.container = container_obj.reference @@ -199,19 +217,19 @@ def create_ase_vdb(engine, server, jobs, vdb_group, vdb_name, environment_obj, return vdb_obj.reference -def create_mssql_vdb(engine, jobs, vdb_group, vdb_name, - environment_obj, container_obj): - ''' +def create_mssql_vdb(engine, server, jobs, vdb_group, vdb_name, environment_obj, + container_obj): + """ Create a MSSQL VDB - engine: - jobs: - vdb_group: - vdb_name, - environment_obj: - container_obj: - - ''' - vdb_obj = find_database_by_name_and_group_name(engine, dx_session_obj.server_session, + :param engine: dictionary of engines + :param server: Virtualization Engine session object + :param jobs: list of running jobs + :param vdb_group: VDB provisioning group + :param vdb_name: Name of VDB + :param environment_obj: Environment object + :param container_obj: Source DB Container object + """ + vdb_obj = find_database_by_name_and_group_name(engine, server, vdb_group.name, vdb_name) if vdb_obj == None: vdb_params = MSSqlProvisionParameters() @@ -224,12 +242,12 @@ def create_mssql_vdb(engine, jobs, vdb_group, vdb_name, 
vdb_params.source_config.database_name = arguments['--db'] vdb_params.source_config.repository = find_dbrepo( - dx_session_obj.server_session, 'MSSqlInstance', environment_obj.reference, - arguments['--envinst']).reference + dx_session_obj.server_session, 'MSSqlInstance', + environment_obj.reference, arguments['--envinst']).reference vdb_params.timeflow_point_parameters = set_timeflow_point(engine, - dx_session_obj.server_session, - container_obj) + dx_session_obj.server_session, + container_obj) if not vdb_params.timeflow_point_parameters: return vdb_params.timeflow_point_parameters.container = \ @@ -242,31 +260,40 @@ def create_mssql_vdb(engine, jobs, vdb_group, vdb_name, # a job was created or not (will return None, if no job) return dx_session_obj.server_session.last_job else: - print_info(engine["hostname"] + ": " + vdb_name + " already exists.") + print_info("{}: vdb_name {} already exists.".format(engine["hostname"], + vdb_name)) return vdb_obj.reference -def create_vfiles_vdb(engine, jobs, vfiles_group, vfiles_name, +def create_vfiles_vdb(engine, server, jobs, vfiles_group, vfiles_name, environment_obj, container_obj, pre_refresh=None, post_refresh=None, pre_rollback=None, post_rollback=None, configure_clone=None): - ''' + """ Create a Vfiles VDB - ''' - + :param engine: dictionary of engines + :param server: Virtualization Engine session object + :param jobs: list of running jobs + :param vdb_group: VDB provisioning group + :param vdb_name: Name of VDB + :param environment_obj: Environment object + :param container_obj: Source DB Container object + :param pre_refresh: Pre-Hook commands + :param postrefresh: Post-Hook commands + :param prerollback: Post-Hook commands + :param postrollback: Post-Hook commands + :param configure-clone: Configure Clone commands + """ vfiles_obj = None - try: - vfiles_obj = find_obj_by_name(dx_session_obj.server_session, - database, vfiles_name) + vfiles_obj = find_obj_by_name(server, database, vfiles_name) except DlpxException: pass if vfiles_obj is None: - vfiles_repo = find_repo_by_environment_ref(engine, + vfiles_repo = find_repo_by_environment_ref(engine, server, 'Unstructured Files', environment_obj.reference) - vfiles_params = AppDataProvisionParameters() vfiles_params.source = AppDataVirtualSource() vfiles_params.source_config = AppDataDirectSourceConfig() @@ -337,8 +364,8 @@ def create_vfiles_vdb(engine, jobs, vfiles_group, vfiles_name, arguments['--timeflow']) except RequestError as e: - raise DlpxException('Could not set the timeflow point:\n%s' - % (e)) + print_exception('Could not set the timeflow point:' + '\n{}'.format(e)) if dx_snap_params.type == 'TimeflowPointSemantic': vfiles_params.timeflow_point_parameters = {'type': @@ -356,7 +383,7 @@ def create_vfiles_vdb(engine, jobs, vfiles_group, vfiles_name, 'timestamp': dx_snap_params.timestamp} - print_info('%s: Provisioning %s\n' % (engine["hostname"], + print_info('{}: Provisioning {}\n'.format(engine["hostname"], vfiles_name)) try: @@ -379,24 +406,26 @@ def create_vfiles_vdb(engine, jobs, vfiles_group, vfiles_name, return vfiles_obj.reference -def create_oracle_si_vdb(engine, jobs, vdb_name, vdb_group_obj, - environment_obj, container_obj, pre_refresh=None, - post_refresh=None, pre_rollback=None, - post_rollback=None, configure_clone=None): - - ''' - Create an Oracle SI VDB - ''' +def create_oracle_vdb(engine, server, jobs, vdb_name, vdb_group_obj, + environment_obj, container_obj, dbtype, pre_refresh=None, + post_refresh=None, pre_rollback=None, + post_rollback=None, 
configure_clone=None): + """ + Create an Oracle VDB + """ vdb_obj = None + db = None + inst_name = None + unique_name = None try: - vdb_obj = find_obj_by_name(dx_session_obj.server_session, database, + vdb_obj = find_obj_by_name(server, database, vdb_name) except DlpxException: pass - if vdb_obj == None: + if vdb_obj is None: vdb_params = OracleProvisionParameters() vdb_params.open_resetlogs = True @@ -411,17 +440,17 @@ def create_oracle_si_vdb(engine, jobs, vdb_name, vdb_group_obj, if arguments['--instname']: inst_name = arguments['--instname'] - elif arguments['--instname'] == None: + elif arguments['--instname'] is None: inst_name = vdb_name if arguments['--uniqname']: unique_name = arguments['--uniqname'] - elif arguments['--uniqname'] == None: + elif arguments['--uniqname'] is None: unique_name = vdb_name if arguments['--db']: db = arguments['--db'] - elif arguments['--db'] == None: + elif arguments['--db'] is None: db = vdb_name vdb_params.source.mount_base = arguments['--mntpoint'] @@ -433,48 +462,60 @@ def create_oracle_si_vdb(engine, jobs, vdb_name, vdb_group_obj, template_obj = find_obj_by_name(dx_session_obj.server_session, database.template, arguments['--template']) - vdb_params.source.config_template = template_obj.reference - vdb_params.source_config = OracleSIConfig() vdb_params.source.operations = VirtualSourceOperations() if pre_refresh: vdb_params.source.operations.pre_refresh = [{ 'type': 'RunCommandOnSourceOperation', 'command': pre_refresh }] - if post_refresh: vdb_params.source.operations.post_refresh = [{ 'type': 'RunCommandOnSourceOperation', 'command': post_refresh }] - if pre_rollback: vdb_params.source.operations.pre_rollback = [{ 'type': 'RunCommandOnSourceOperation', 'command': pre_rollback }] - if post_rollback: vdb_params.source.operations.post_rollback = [{ 'type': 'RunCommandOnSourceOperation', 'command': post_rollback }] - if configure_clone: vdb_params.source.operations.configure_clone = [{ 'type': 'RunCommandOnSourceOperation', 'command': configure_clone }] - vdb_repo = find_dbrepo_by_environment_ref_and_install_path(engine, dx_session_obj.server_session, 'OracleInstall', environment_obj.reference, arguments['--envinst']) + if dbtype == 'oraclecluster': + vdb_params.source_config = OracleRACConfig() + clust_nodes = clusternode.get_all(dx_session_obj.server_session, + environment_obj.reference ) + vdb_params.source_config.instances = [] + inst_number = 1 + #vdb_params.source_config.instances = OracleRACInstance() + for inst in clust_nodes: + inst_name = db + str(inst_number) + vdb_params.source_config.instances.append({'instanceName': + inst_name, 'type': + 'OracleRACInstance', + 'node': + inst.reference, + 'instanceNumber': + inst_number}) + inst_number = inst_number + 1 + elif dbtype == 'oracle': + vdb_params.source_config = OracleSIConfig() + vdb_params.source_config.instance = OracleInstance() + vdb_params.source_config.instance.instance_name = inst_name + vdb_params.source_config.instance.instance_number = 1 vdb_params.source_config.database_name = db vdb_params.source_config.unique_name = unique_name - vdb_params.source_config.instance = OracleInstance() - vdb_params.source_config.instance.instance_name = inst_name - vdb_params.source_config.instance.instance_number = 1 vdb_params.source_config.repository = vdb_repo.reference dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session) @@ -483,23 +524,22 @@ def create_oracle_si_vdb(engine, jobs, vdb_name, vdb_group_obj, arguments['--timestamp_type'], arguments['--timestamp']) - print vdb_params, 
'\n\n\n' print_info(engine["hostname"] + ": Provisioning " + vdb_name) database.provision(dx_session_obj.server_session, vdb_params) - #Add the job into the jobs dictionary so we can track its progress + #Add the job into the jobs dictionary so we can track its progress jobs[engine['hostname']] = dx_session_obj.server_session.last_job + #return the job object to the calling statement so that we can tell if # a job was created or not (will return None, if no job) - return dx_session_obj.server_session.last_job else: - raise DlpxException('\nERROR: %s: %s alread exists\n' % - (engine['hostname'], vdb_name)) + raise DlpxException('\nERROR: {}: {} alread exists\n'.format( + engine['hostname'], vdb_name)) -def find_all_databases_by_group_name(engine, server, group_name, +def find_all_databases_by_group_name(server, group_name, exclude_js_container=False): """ Easy way to quickly find databases by group name @@ -517,16 +557,16 @@ def find_all_databases_by_group_name(engine, server, group_name, def find_database_by_name_and_group_name(engine, server, group_name, database_name): - databases = find_all_databases_by_group_name(engine, server, group_name) + databases = find_all_databases_by_group_name(server, group_name) for each in databases: if each.name == database_name: - print_debug('%s: Found a match %s' % (engine['hostname'], + print_debug('{}: Found a match {}'.format(engine['hostname'], str(each.reference))) return each - print_info('%s unable to find %s in %s' % (engine['hostname'], - database_name, group_name)) + print_info('{} unable to find {} in {}'.format(engine['hostname'], + database_name, group_name)) def find_dbrepo_by_environment_ref_and_install_path(engine, server, @@ -538,102 +578,91 @@ def find_dbrepo_by_environment_ref_and_install_path(engine, server, install path, and return the object's reference as a string You might use this function to find Oracle and PostGreSQL database repos. 
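# Illustrative sketch (not part of the patch): the new 'oraclecluster' path in
# create_oracle_vdb() above builds an OracleRACConfig with one OracleRACInstance
# entry per node returned by clusternode.get_all(), numbering instances from 1
# and deriving each instance name from the database name. Assumes an established
# delphixpy v1_8_0 session and a resolved RAC target environment reference.
from delphixpy.v1_8_0.web.environment.oracle import clusternode
from delphixpy.v1_8_0.web.vo import OracleRACConfig

def build_rac_source_config(server_session, environment_ref, db_name):
    """Return an OracleRACConfig with one instance per RAC cluster node."""
    rac_config = OracleRACConfig()
    rac_config.instances = []
    for inst_number, node in enumerate(
            clusternode.get_all(server_session, environment_ref), start=1):
        rac_config.instances.append({'type': 'OracleRACInstance',
                                     'instanceName': '{}{}'.format(db_name,
                                                                   inst_number),
                                     'instanceNumber': inst_number,
                                     'node': node.reference})
    return rac_config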
''' - print_debug('%s: Searching objects in the %s class for one with the ' - 'environment reference of %s and an install path of %s' % - (engine['hostname'], install_type, f_environment_ref, + print_debug('{}: Searching objects in the %s class for one with the ' + 'environment reference of %s and an install path of {}'.format( + engine['hostname'], install_type, f_environment_ref, f_install_path), debug) for obj in repository.get_all(server, environment=f_environment_ref): - if install_type == 'PgSQLInstall': - if (obj.type == install_type and - obj.installation_path == f_install_path): - print_debug('%s: Found a match %s' % (engine['hostname'], - str(obj.reference)), debug) - return obj - - elif install_type == 'OracleInstall': - if (obj.type == install_type and - obj.installation_home == f_install_path): - - print_debug('%s: Fount a match %s' % (engine['hostname'], + if install_type == 'OracleInstall': + if obj.type == install_type and \ + obj.installation_home == f_install_path: + print_debug('{}: Found a match {}'.format(engine['hostname'], str(obj.reference)), debug) return obj else: - raise DlpxException('%s: No Repo match found for type %s.\n' % - (engine["hostname"], install_type)) + raise DlpxException('{}: No Repo match found for type {}.'.format( + engine["hostname"], install_type)) -def find_repo_by_environment_ref(engine, repo_type, f_environment_ref, - f_install_path=None): - ''' +def find_repo_by_environment_ref(engine, server, repo_type, f_environment_ref): + """ Function to find unstructured file repository objects by environment reference and name, and return the object's reference as a string You might use this function to find Unstructured File repos. - ''' - - print_debug('\n%s: Searching objects in the %s class for one with the' - 'environment reference of %s\n' % - (engine['hostname'], repo_type, f_environment_ref), debug) + """ - obj_ref = '' - all_objs = repository.get_all(dx_session_obj.server_session, - environment=f_environment_ref) + print_debug('\n{}: Searching objects in the {} class for one with the' + 'environment reference of {}\n'.format( + engine['hostname'], repo_type, f_environment_ref), debug) + all_objs = repository.get_all(server, environment=f_environment_ref) for obj in all_objs: if obj.name == repo_type: - print_debug(engine['hostname'] + ': Found a match ' + + print_debug(engine['hostname'] + ': Found a match ' + str(obj.reference)) - return obj + return obj elif obj.type == repo_type: - print_debug('%s Found a match %s' % (engine['hostname'], - str(obj.reference)), debug) - return obj + print_debug('{} Found a match {}'.format(engine['hostname'], + str(obj.reference)), debug) + return obj - raise DlpxException('%s: No Repo match found for type %s\n' % ( + # Raise an exception if the object isn't found + raise DlpxException('{}: No Repo match found for type {}\n'.format( engine['hostname'], repo_type)) -def find_dbrepo_by_environment_ref_and_name(engine, repo_type, +def find_dbrepo_by_environment_ref_and_name(engine, server, repo_type, f_environment_ref, f_name): - ''' + """ Function to find database repository objects by environment reference and name, and return the object's reference as a string You might use this function to find MSSQL database repos. - ''' + """ - print_debug('%s: Searching objects in the %s class for one with the ' - 'environment reference of %s and a name of %s.' 
% - (engine['hostname'], repo_type, f_environment_ref, f_name), + print_debug('{}: Searching objects in the {} class for one with the ' + 'environment reference of {} and a name of {}.'.format( + engine['hostname'], repo_type, f_environment_ref, f_name), debug) - - obj_ref = '' all_objs = repository.get_all(server, environment=f_environment_ref) for obj in all_objs: - if (repo_type == 'MSSqlInstance' or repo_type == 'ASEInstance'): - if (obj.type == repo_type and obj.name == f_name): - print_debug('%s: Found a match %s' % (engine['hostname'], + if repo_type == 'MSSqlInstance' or repo_type == 'ASEInstance': + if obj.type == repo_type and obj.name == f_name: + print_debug('{}: Found a match {}'.format(engine['hostname'], str(obj.reference)), debug) return obj elif repo_type == 'Unstructured Files': - if obj.value == install_type: - print_debug('%s: Found a match %s' % (engine['hostname'], + if obj.value == repo_type: + print_debug('{}: Found a match {}'.format(engine['hostname'], str(obj.reference)), debug) return obj - raise DlpxException('%s: No Repo match found for type %s\n' % - (engine['hostname'], repo_type)) + raise DlpxException('{}: No Repo match found for type {}\n'.format( + engine['hostname'], repo_type)) def find_snapshot_by_database_and_name(engine, database_obj, snap_name): """ Find snapshots by database and name. Return snapshot reference. - engine: Dictionary of engines from config file. - database_obj: Database object to find the snapshot against - snap_name: Name of the snapshot + :param engine: Dictionary of engines from config file. + :param database_obj: Database object to find the snapshot against + :param snap_name: Name of the snapshot """ snapshots = snapshot.get_all(dx_session_obj.server_session, @@ -652,13 +681,13 @@ def find_snapshot_by_database_and_name(engine, database_obj, snap_name): return matches[0] elif len(matches) > 1: - raise DlpxException('%s: The name specified was not specific enough.' - ' More than one match found.\n' % - (engine['hostname'],)) + raise DlpxException('{}: The name specified was not specific enough.' + ' More than one match ' + 'found.'.format(engine['hostname'])) else: - raise DlpxException('%s: No matches found for the time specified.\n' - % (engine['hostname'])) + raise DlpxException('{}: No matches found for the time ' + 'specified.'.format(engine['hostname'])) def find_snapshot_by_database_and_time(engine, database_obj, snap_time): @@ -672,38 +701,19 @@ def find_snapshot_by_database_and_time(engine, database_obj, snap_time): matches.append(snapshot_obj) if len(matches) == 1: - print_debug('%s": Found one and only one match. This is good.\n%s' % - (engine['hostname'], matches[0]), debug) - + print_debug('{}": Found one and only one match. This is ' + 'good.\n{}'.format(engine['hostname'], matches[0]), debug) return matches[0] elif len(matches) > 1: print_debug(matches, debug) - raise DlpxException('%s: The time specified was not specific enough.' - 'More than one match found.\n' % - (engine['hostname'])) + raise DlpxException('{}: The time specified was not specific enough.' + 'More than one match ' + 'found.'.format(engine['hostname'])) else: - raise DlpxException('%s: No matches found for the time specified.\n' - % (engine['hostname'])) - - -def find_source_by_database(engine, database_obj): - #The source tells us if the database is enabled/disables, virtual, - # vdb/dSource, or is a staging database.
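# Illustrative sketch (not part of the patch): the repository helpers above all
# follow one pattern -- list the repositories scoped to an environment and match
# on a field such as name or installation path. A minimal name-based variant,
# assuming an established delphixpy v1_8_0 session:
from delphixpy.v1_8_0.web import repository

def find_repo_named(server_session, environment_ref, repo_name):
    """Return the first repository in the environment whose name matches."""
    for obj in repository.get_all(server_session, environment=environment_ref):
        if obj.name == repo_name:
            return obj
    return None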
- source_obj = source.get_all(server, database=database_obj.reference) - - #We'll just do a little sanity check here to ensure we only have a 1:1 - # result. - if len(source_obj) == 0: - raise DlpxException('%s: Did not find a source for %s. Exiting.\n' % - (engine['hostname'], database_obj.name)) - - elif len(source_obj) > 1: - raise DlpxException('%s: More than one source returned for %s. ' - 'Exiting.\n' % (engine['hostname'], - database_obj.name + ". Exiting")) - return source_obj + raise DlpxException('{}: No matches found for the time ' + 'specified.'.format(engine['hostname'])) def run_async(func): @@ -745,12 +755,10 @@ def main_workflow(engine): Use the @run_async decorator to run this function asynchronously. This allows us to run against multiple Delphix Engine simultaneously - engine: Dictionary containing engine information + :param engine: Dictionary containing engine information """ #Establish these variables as empty for use later - environment_obj = None - source_objs = None jobs = {} try: @@ -762,19 +770,19 @@ def main_workflow(engine): arguments['--target_grp']) #Get the reference of the target environment. - print_debug('Getting environment for %s\n' % (host_name), debug) + print_debug('Getting environment for {}\n'.format(host_name), debug) #Get the environment object by the hostname - environment_obj = find_obj_by_name(dx_session_obj.server_session, + environment_obj = find_obj_by_name(dx_session_obj.server_session, environment, host_name) except DlpxException as e: - print('\nERROR: Engine %s encountered an error while provisioning ' - '%s:\n%s\n' % (engine['hostname'], arguments['--target'], e)) + print('\nERROR: Engine {} encountered an error while provisioning ' + '{}:\n{}\n'.format(engine['hostname'], arguments['--target'], e)) sys.exit(1) - print_debug('Getting database information for %s\n' % - (arguments['--source']), debug) + print_debug('Getting database information for {}\n'.format( + arguments['--source']), debug) try: #Get the database reference we are copying from the database name database_obj = find_obj_by_name(dx_session_obj.server_session, @@ -783,39 +791,37 @@ def main_workflow(engine): return thingstodo = ["thingtodo"] - #reset the running job count before we begin - i = 0 - try: with dx_session_obj.job_mode(single_thread): while (len(jobs) > 0 or len(thingstodo) > 0): arg_type = arguments['--type'].lower() if len(thingstodo)> 0: - if arg_type == "oracle": - create_oracle_si_vdb(engine, jobs, database_name, - group_obj, environment_obj, - database_obj, - arguments['--prerefresh'], - arguments['--postrefresh'], - arguments['--prerollback'], - arguments['--postrollback'], - arguments['--configure-clone']) + if arg_type in ('oracle', 'oraclecluster'): + create_oracle_vdb(engine, dx_session_obj.server_session, + jobs, database_name, group_obj, + environment_obj, database_obj, + arguments['--type'], + arguments['--prerefresh'], + arguments['--postrefresh'], + arguments['--prerollback'], + arguments['--postrollback'], + arguments['--configure-clone']) elif arg_type == "ase": - create_ase_vdb(engine, server, jobs, group_obj, - database_name, environment_obj, - database_obj) + create_ase_vdb(engine, dx_session_obj.server_session, + jobs, group_obj, database_name, + environment_obj, database_obj) elif arg_type == "mssql": - create_mssql_vdb(engine, jobs, group_obj, - database_name, environment_obj, - database_obj) + create_mssql_vdb(engine, dx_session_obj.server_session, + jobs, group_obj, database_name, + environment_obj, database_obj) elif arg_type == 
"vfiles": - create_vfiles_vdb(engine, jobs, group_obj, - database_name, environment_obj, - database_obj, + create_vfiles_vdb(engine, dx_session_obj.server_session, + jobs, group_obj, database_name, + environment_obj, database_obj, arguments['--prerefresh'], arguments['--postrefresh'], arguments['--prerollback'], @@ -841,7 +847,7 @@ def main_workflow(engine): # running job count. i += 1 - print_info('%s: %s jobs running.' % (engine['hostname'], + print_info('{}: {} jobs running.'.format(engine['hostname'], str(i))) #If we have running jobs, pause before repeating the checks. @@ -884,13 +890,13 @@ def run_job(): if arguments['--engine']: try: engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: %s\n' % - (arguments['--engine'])) + print_info('Executing against Delphix Engine: {}\n'.format( + arguments['--engine'])) except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine %s cannot be ' - 'found in %s. Please check your value ' - 'and try again. Exiting.\n' % ( + raise DlpxException('\nERROR: Delphix Engine {} cannot be ' + 'found in {}. Please check your value ' + 'and try again. Exiting.\n'.format( arguments['--engine'], config_file_path)) else: @@ -901,7 +907,7 @@ def run_job(): engine = dx_session_obj.dlpx_engines[delphix_engine] print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: %s' % ( + 'in the dxtools.conf: {}'.format( dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) break @@ -1006,13 +1012,17 @@ def set_timeflow_point(engine, server, container_obj): timeflow_point_parameters.container = container_obj.reference return timeflow_point_parameters -def time_elapsed(): + +def time_elapsed(time_start): """ This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time + Call this anywhere you want to note the progress in terms of time + + :param time_start: start time of the script. + :type time_start: float """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes + return round((time() - time_start)/60, +1) + def update_jobs_dictionary(engine, server, jobs): """ @@ -1040,11 +1050,11 @@ def update_jobs_dictionary(engine, server, jobs): return i -def main(argv): +def main(): #We want to be able to call on these variables anywhere in the script. global single_thread global usebackup - global time_start +# global time_start global config_file_path global database_name global host_name @@ -1074,67 +1084,54 @@ def main(argv): # all the servers. run_job() - elapsed_minutes = time_elapsed() - print_info('script took %s minutes to get this far. ' % - (str(elapsed_minutes))) + elapsed_minutes = time_elapsed(time_start) + print_info('script took {:.2f} minutes to get this far. '.format( + elapsed_minutes)) - #Here we handle what we do when the unexpected happens + # Here we handle what we do when the unexpected happens except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ + # This is what we use to handle our sys.exit(#) sys.exit(e) except DlpxException as e: - """ - We use this exception handler when an error occurs in a function call. - """ - - print('\nERROR: Please check the ERROR message below:\n%s' % - (e.message)) + # We use this exception handler when an error occurs in a function call. 
+ print_exception('ERROR: Please check the ERROR message below:\n' + '{}'.format(e.message)) sys.exit(2) except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print('\nERROR: Connection failed to the Delphix Engine. Please ' - 'check the ERROR message below:\n%s' % (e.message)) + # We use this exception handler when our connection to Delphix fails + print_exception('ERROR: Connection failed to the Delphix Engine. Please' + 'check the ERROR message below:\n{}'.format(e.message)) sys.exit(2) except JobError as e: - """ - We use this exception handler when a job fails in Delphix so - that we have actionable data - """ - print 'A job failed in the Delphix Engine:\n%s' (e.job) - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far' % (basename(__file__), - str(elapsed_minutes))) + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) + elapsed_minutes = time_elapsed(time_start) + print_exception('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) sys.exit(3) except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ + # We use this exception handler to gracefully handle ctrl+c exits print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far' % (basename(__file__), - str(elapsed_minutes))) - + elapsed_minutes = time_elapsed(time_start) + print_info('{} took {:.2f} minutes to get this far'.format( + basename(__file__), elapsed_minutes)) except: - """ - Everything else gets caught here - """ - print(sys.exc_info()[0]) - print(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far' % (basename(__file__), - str(elapsed_minutes))) + # Everything else gets caught here + print_exception('{}\n{}'.format(sys.exc_info()[0], + traceback.format_exc())) + elapsed_minutes = time_elapsed(time_start) + print_info("{} took {:.2f} minutes to get this far".format( + basename(__file__), elapsed_minutes)) sys.exit(1) if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script + # Grab our arguments from the doc at the top of the script arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) + + # Feed our arguments to the main function, and off we go! + main() diff --git a/v1_8_0/lib/GetReferences.py b/v1_8_0/lib/GetReferences.py index 75f711f..812a8ef 100644 --- a/v1_8_0/lib/GetReferences.py +++ b/v1_8_0/lib/GetReferences.py @@ -216,6 +216,7 @@ def find_obj_name(engine, f_class, obj_reference): obj_reference: The object reference to retrieve the name """ try: + print 'engine obj: {}, {}'.format(engine, obj_reference) obj_name = f_class.get(engine, obj_reference) return obj_name.name @@ -230,12 +231,11 @@ def find_dbrepo(engine, install_type, f_environment_ref, f_install_path): """ Function to find database repository objects by environment reference and install path, and return the object's reference as a string - You might use this function to find Oracle and PostGreSQL database repos. 
engine: Virtualization Engine Session object install_type: Type of install - Oracle, ASE, SQL f_environment_ref: Reference of the environment for the repository f_install_path: Path to the installation directory. - return: delphixpy.web.vo.SourceRepository object + return: delphixpy.web.vo.SourceRepository """ print_debug('Searching objects in the %s class for one with the ' diff --git a/v1_8_0/requirements.txt b/v1_8_0/requirements.txt index deed047..a8d864f 100644 --- a/v1_8_0/requirements.txt +++ b/v1_8_0/requirements.txt @@ -1,4 +1,5 @@ setuptools pip docopt -delphixpy \ No newline at end of file +delphixpy +python-dateutil From ed82cd0fb2b51f3b6d2143f01f2e9f4e91b14f1f Mon Sep 17 00:00:00 2001 From: Corey Brune Date: Thu, 4 Jan 2018 16:24:46 -0600 Subject: [PATCH 6/6] Updated JS code to reset and create from bookmarks --- v1_8_0/js_bookmark.py | 103 +++++++++++++++++++++++++----------- v1_8_0/js_branch.py | 81 ++++++++++++++++------------ v1_8_0/js_container.py | 24 ++++++++- v1_8_0/lib/GetReferences.py | 3 +- 4 files changed, 141 insertions(+), 70 deletions(-) diff --git a/v1_8_0/js_bookmark.py b/v1_8_0/js_bookmark.py index 658e0fc..29ab961 100755 --- a/v1_8_0/js_bookmark.py +++ b/v1_8_0/js_bookmark.py @@ -16,7 +16,7 @@ # """Creates, lists, removes a Jet Stream Bookmark Usage: - js_bookmark.py (--create_bookmark --data_layout [--branch_name | --activate_bookmark | --update_bookmark | --share_bookmark | --unshare_bookmark ) + js_bookmark.py (--create_bookmark --data_layout [--tag --description --branch_name | --activate_bookmark | --update_bookmark | --share_bookmark | --unshare_bookmark ) [--engine | --all] [--parallel ] [--poll ] [--debug] [--config ] [--logdir ] @@ -27,6 +27,7 @@ Examples: js_bookmark.py --list_bookmarks js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 + js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 --tag 1.86.2 --description "Before commit" js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 --branch_name jsbranch1 js_bookmark.py --activate_bookmark jsbookmark1 js_bookmark.py --update_bookmark jsbookmark1 @@ -37,6 +38,8 @@ Options: --create_bookmark Name of the new JS Bookmark --container_name Name of the container to use + --tag Tag to use for this bookmark + --description Description of this bookmark --update_bookmark Name of the bookmark to update --share_bookmark Name of the bookmark to share --unshare_bookmark Name of the bookmark to unshare @@ -59,7 +62,7 @@ -v --version Show version. 
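# Illustrative sketch (not part of the patch): the new --tag and --description
# options are carried on a JSBookmark object rather than the bare dict used
# before. Branch and data-layout references are assumed to be resolved already,
# as create_bookmark() below does, against a delphixpy v1_8_0 session.
from delphixpy.v1_8_0.web.jetstream import bookmark
from delphixpy.v1_8_0.web.vo import JSBookmark, JSBookmarkCreateParameters

def build_bookmark_params(bookmark_name, branch_ref, source_layout_ref,
                          tag=None, description=None):
    """Assemble JSBookmarkCreateParameters for the latest point in time."""
    params = JSBookmarkCreateParameters()
    params.bookmark = JSBookmark()
    params.bookmark.name = bookmark_name
    params.bookmark.branch = branch_ref
    if tag:
        params.bookmark.tags = [tag]
    if description:
        params.bookmark.description = description
    params.timeline_point_parameters = {
        'type': 'JSTimelinePointLatestTimeInput',
        'sourceDataLayout': source_layout_ref}
    return params
# bookmark.create(server_session, build_bookmark_params(...)) then submits it.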
""" -VERSION="v.0.0.015" +VERSION="v.0.0.018" from docopt import docopt from os.path import basename @@ -72,7 +75,6 @@ from delphixpy.v1_8_0.web.jetstream import branch from delphixpy.v1_8_0.web.jetstream import template from delphixpy.v1_8_0.web.jetstream import container -from delphixpy.v1_8_0.web.jetstream import datasource from delphixpy.v1_8_0.web.vo import JSBookmarkCreateParameters from delphixpy.v1_8_0.web.vo import JSBookmark from delphixpy.v1_8_0.exceptions import RequestError @@ -90,18 +92,23 @@ from lib.DxLogging import print_exception -def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None): +def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None, + tag=None, description=None): """ Create the JS Bookmark :param dlpx_obj: Virtualization Engine session object :type dlpx_obj: lib.GetSession.GetSession :param bookmark_name: Name of the bookmark to create - :type bookmark_name: str + :type bookmark_name: basestring :param source_layout: Name of the source (template or container) to use - :type source_layout: str + :type source_layout: basestring :param branch_name: Name of the branch to use - :type branch_name: str + :type branch_name: basestring + :param tag_name: Tag to use for the bookmark + :type tag: basestring + :param description: Description of the bookmark + :type description: basestring """ branch_ref = None @@ -109,28 +116,41 @@ def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None): engine_name = dlpx_obj.dlpx_engines.keys()[0] js_bookmark_params = JSBookmarkCreateParameters() if branch_name: - if branch_name == 'master' or branch_name == 'default': - try: - source_layout_ref = find_obj_by_name(dlpx_obj.server_session, - template, - source_layout).reference - except DlpxException: - source_layout_ref = find_obj_by_name( - dlpx_obj.server_session, container, - source_layout).reference - for branch_obj in branch.get_all(dlpx_obj.server_session): - if branch_name == branch_obj.name and \ - source_layout_ref == branch_obj.data_layout: - branch_ref = branch_obj.reference - break + try: + source_layout_ref = find_obj_by_name(dlpx_obj.server_session, + template, + source_layout).reference + except DlpxException: + source_layout_ref = find_obj_by_name( + dlpx_obj.server_session, container, + source_layout).reference + #import pdb;pdb.set_trace() + for branch_obj in branch.get_all(dlpx_obj.server_session): + if branch_name == branch_obj.name and \ + source_layout_ref == branch_obj.data_layout: + branch_ref = branch_obj.reference + break + if branch_ref is None: + raise DlpxException('Set the --data_layout parameter equal to ' + 'the data layout of the bookmark.\n') elif branch_name is None: - (source_layout_ref, branch_ref) = find_obj_by_name( - dlpx_obj.server_session, template, source_layout, True) + try: + (source_layout_ref, branch_ref) = find_obj_by_name( + dlpx_obj.server_session, template, source_layout, True) + except DlpxException: + (source_layout_ref, branch_ref) = find_obj_by_name( + dlpx_obj.server_session, container, source_layout, True) if branch_ref is None: raise DlpxException('Could not find {} in engine {}'.format( branch_name, engine_name)) - js_bookmark_params.bookmark = {'name': bookmark_name, 'branch': branch_ref, - 'type': 'JSBookmark'} + js_bookmark_params.bookmark = JSBookmark() + js_bookmark_params.bookmark.name = bookmark_name + js_bookmark_params.bookmark.branch = branch_ref + if tag: + js_bookmark_params.bookmark.tags = list() + js_bookmark_params.bookmark.tags.append(tag) + if 
description: + js_bookmark_params.bookmark.description = description js_bookmark_params.timeline_point_parameters = { 'sourceDataLayout': source_layout_ref, 'type': 'JSTimelinePointLatestTimeInput'} @@ -145,24 +165,38 @@ def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None): 'was:\n\n{}'.format(bookmark_name, e)) -def list_bookmarks(dlpx_obj): +def list_bookmarks(dlpx_obj, tag_filter=None): """ List all bookmarks on a given engine :param dlpx_obj: Virtualization Engine session object + :param tag_filter: Only list bookmarks with given tag """ - header = '\nName, Reference, Branch Name, Template Name' + header = '\nName, Reference, Branch Name, Template Name, Tags' try: js_bookmarks = bookmark.get_all(dlpx_obj.server_session) print header for js_bookmark in js_bookmarks: branch_name = find_obj_name(dlpx_obj.server_session, branch, js_bookmark.branch) - print '{}, {}, {}, {}'.format(js_bookmark.name, - js_bookmark.reference, branch_name, - js_bookmark.template_name) + if tag_filter in js_bookmark.tags: + print '{}, {}, {}, {}, {}'.format(js_bookmark.name, + js_bookmark.reference, + branch_name, + js_bookmark.template_name, + ", ".join(tag for tag in + js_bookmark.tags)) + elif tag_filter is None: + tag = js_bookmark.tags if js_bookmark.tags else None + if tag: + tag = ", ".join(tag for tag in js_bookmark.tags) + print '{}, {}, {}, {}, {}'.format(js_bookmark.name, + js_bookmark.reference, + branch_name, + js_bookmark.template_name, + tag) print '\n' except (DlpxException, HttpError, RequestError) as e: @@ -331,7 +365,11 @@ def main_workflow(engine, dlpx_obj): arguments['--data_layout'], arguments['--branch_name'] if arguments['--branch_name'] - else None) + else None, + arguments['--tag'] + if arguments['--tag'] else None, + arguments['--description'] + if arguments['--description'] else None) elif arguments['--delete_bookmark']: delete_bookmark(dlpx_obj, arguments['--delete_bookmark']) @@ -345,7 +383,8 @@ def main_workflow(engine, dlpx_obj): unshare_bookmark(dlpx_obj, arguments['--unshare_bookmark']) elif arguments['--list_bookmarks']: - list_bookmarks(dlpx_obj) + list_bookmarks(dlpx_obj, + arguments['--tag'] if arguments['--tag'] else None) thingstodo.pop() # get all the jobs, then inspect them i = 0 diff --git a/v1_8_0/js_branch.py b/v1_8_0/js_branch.py index 9d6e16e..725c1d5 100755 --- a/v1_8_0/js_branch.py +++ b/v1_8_0/js_branch.py @@ -16,7 +16,7 @@ # """Creates, updates, deletes, activates and lists branches Usage: - js_branch.py (--create_branch --container_name --template_name | --list_branches | --delete_branch | --activate_branch | --update_branch ) + js_branch.py (--create_branch --container_name [--template_name | --bookmark_name ]| --list_branches | --delete_branch | --activate_branch | --update_branch ) [--engine | --all] [--parallel ] [--poll ] [--debug] [--config ] [--logdir ] @@ -33,6 +33,7 @@ Options: --create_branch Name of the new JS Branch + --bookmark_name Name of the container to use --update_branch Name of the branch to update --template_name Name of the template to use @@ -53,7 +54,7 @@ -v --version Show version. 
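# Illustrative sketch (not part of the patch): with --bookmark_name the branch
# is rooted at a bookmark via JSTimelinePointBookmarkInput instead of the
# latest time on a template, as create_branch() below shows. Assumes an
# established delphixpy v1_8_0 session and lib.GetReferences.find_obj_by_name.
from delphixpy.v1_8_0.web.jetstream import bookmark, branch, container
from delphixpy.v1_8_0.web.vo import (JSBranchCreateParameters,
                                     JSTimelinePointBookmarkInput)
from lib.GetReferences import find_obj_by_name

def create_branch_from_bookmark(server_session, branch_name, container_name,
                                bookmark_name):
    """Create a JS branch on container_name rooted at bookmark_name."""
    params = JSBranchCreateParameters()
    params.name = branch_name
    params.data_container = find_obj_by_name(server_session, container,
                                             container_name).reference
    params.timeline_point_parameters = JSTimelinePointBookmarkInput()
    params.timeline_point_parameters.bookmark = find_obj_by_name(
        server_session, bookmark, bookmark_name).reference
    branch.create(server_session, params)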
""" -VERSION="v.0.0.010" +VERSION="v.0.0.015" from docopt import docopt from os.path import basename @@ -67,7 +68,10 @@ from delphixpy.v1_8_0.web.jetstream import container from delphixpy.v1_8_0.web.jetstream import template from delphixpy.v1_8_0.web.jetstream import operation +from delphixpy.v1_8_0.web.jetstream import bookmark from delphixpy.v1_8_0.web.vo import JSBranchCreateParameters +from delphixpy.v1_8_0.web.vo import JSTimelinePointBookmarkInput +from delphixpy.v1_8_0.web.vo import JSTimelinePointLatestTimeInput from delphixpy.v1_8_0.web.vo import JSBranch from delphixpy.v1_8_0.exceptions import RequestError from delphixpy.v1_8_0.exceptions import JobError @@ -83,45 +87,51 @@ from lib.DxLogging import print_exception -def create_branch(dlpx_obj, branch_name, template_name, container_name): +def create_branch(dlpx_obj, branch_name, container_name, template_name=None, + bookmark_name=None): """ Create the JS Branch - dlpx_obj: Virtualization Engine session object - branch_name: Name of the branch to create - template_name: Name of the template to use - container_name: Name of the container to use + :param dlpx_obj: Virtualization Engine session object + :param branch_name: Name of the branch to create + :param container_name: Name of the container to use + :param template_name: Name of the template to use + :param bookmark_name: Name of the bookmark to use """ - js_branch_params = JSBranchCreateParameters() - js_branch_params.name = branch_name + js_branch = JSBranchCreateParameters() + js_branch.name = branch_name engine_name = dlpx_obj.dlpx_engines.keys()[0] + data_container_obj = find_obj_by_name(dlpx_obj.server_session, + container, container_name) + js_branch.data_container = data_container_obj.reference + + if bookmark_name: + js_branch.timeline_point_parameters = JSTimelinePointBookmarkInput() + js_branch.timeline_point_parameters.bookmark = find_obj_by_name( + dlpx_obj.server_session, bookmark, bookmark_name).reference + elif template_name: + source_layout_ref = find_obj_by_name(dlpx_obj.server_session, + template, template_name).reference + js_branch.timeline_point_parameters = JSTimelinePointLatestTimeInput() + js_branch.timeline_point_parameters.source_data_layout = \ + source_layout_ref + try: - data_container_obj = find_obj_by_name(dlpx_obj.server_session, - container, container_name) - source_layout_obj = find_obj_by_name(dlpx_obj.server_session, - template, template_name) - js_branch_params.data_container = data_container_obj.reference - js_branch_params.timeline_point_parameters = { - 'sourceDataLayout': - source_layout_obj.reference, - 'type': - 'JSTimelinePointLatestTimeInput'} - branch.create(dlpx_obj.server_session, js_branch_params) + branch.create(dlpx_obj.server_session, js_branch) dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('JS Branch {} was created successfully.'.format( - branch_name)) except (DlpxException, RequestError, HttpError) as e: print_exception('\nThe branch was not created. 
The error was:' '\n{}'.format(e)) + print_info('JS Branch {} was created successfully.'.format( + branch_name)) def list_branches(dlpx_obj): """ List all branches on a given engine - dlpx_obj: Virtualization Engine session object - No args required + :param dlpx_obj: Virtualization Engine session object """ try: @@ -152,8 +162,8 @@ def update_branch(dlpx_obj, branch_name): """ Updates a branch - dlpx_obj: Virtualization Engine session object - branch_name: Name of the branch to update + :param dlpx_obj: Virtualization Engine session object + :param branch_name: Name of the branch to update """ js_branch_obj = JSBranch() @@ -173,8 +183,8 @@ def activate_branch(dlpx_obj, branch_name): """ Activates a branch - dlpx_obj: Virtualization Engine session object - branch_name: Name of the branch to activate + :param dlpx_obj: Virtualization Engine session object + :param branch_name: Name of the branch to activate """ engine_name = dlpx_obj.dlpx_engines.keys()[0] @@ -193,8 +203,8 @@ def activate_branch(dlpx_obj, branch_name): def delete_branch(dlpx_obj, branch_name): """ Deletes a branch - dlpx_obj: Virtualization Engine session object - branch_name: Branch to delete + :param dlpx_obj: Virtualization Engine session object + :param branch_name: Branch to delete """ try: @@ -257,8 +267,8 @@ def main_workflow(engine, dlpx_obj): The @run_async decorator allows us to run against multiple Delphix Engine simultaneously - engine: Dictionary of engines - dlpx_obj: Virtualization Engine session object + :param engine: Dictionary of engines + :param dlpx_obj: Virtualization Engine session object """ #Establish these variables as empty for use later @@ -283,8 +293,11 @@ def main_workflow(engine, dlpx_obj): if len(thingstodo) > 0: if arguments['--create_branch']: create_branch(dlpx_obj, arguments['--create_branch'], - arguments['--template_name'], - arguments['--container_name']) + arguments['--container_name'], + arguments['--template_name'] + if arguments['--template_name'] else None, + arguments['--bookmark_name'] + if arguments['--bookmark_name'] else None) elif arguments['--delete_branch']: delete_branch(dlpx_obj, arguments['--delete_branch']) elif arguments['--update_branch']: diff --git a/v1_8_0/js_container.py b/v1_8_0/js_container.py index b2226d5..7d10484 100755 --- a/v1_8_0/js_container.py +++ b/v1_8_0/js_container.py @@ -16,7 +16,7 @@ # """Create, delete, refresh and list JS containers. 
Usage: - js_container.py (--create_container --template_name --database | --list_hierarchy | --list | --delete_container [--keep_vdbs]| --refresh_container | --add_owner --container_name | --remove_owner --container_name | --restore_container --bookmark_name ) + js_container.py (--create_container --template_name --database | --reset | --list_hierarchy | --list | --delete_container [--keep_vdbs]| --refresh_container | --add_owner --container_name | --remove_owner --container_name | --restore_container --bookmark_name ) [--engine | --all] [--parallel ] [--poll ] [--debug] [--config ] [--logdir ] @@ -35,12 +35,14 @@ js_container.py --remove_owner jsuser --container_name jscontainer1 js_container.py --refresh_container jscontainer1 js_container.py --restore_container jscontainer1 --bookmark_name jsbookmark1 + js_container.py --reset jscontainer1 Options: --create_container Name of the new JS Container --container_name Name of the JS Container --refresh_container Name of the new JS Container --restore_container Name of the JS Container to restore + --reset Reset last data operation --template_name Name of the JS Template to use for the container --add_owner Name of the JS Owner for the container --remove_owner Name of the JS Owner to remove @@ -66,7 +68,7 @@ -v --version Show version. """ -VERSION = "v.0.0.015" +VERSION = "v.0.0.020" from os.path import basename import sys @@ -277,6 +279,22 @@ def list_containers(dlpx_obj): 'error was:\n\n{}'.format(e)) +def reset_container(dlpx_obj, container_name): + """ + Undo the last refresh or restore operation + :param dlpx_obj: Virtualization Engine session object + :param container_name: Name of the container to reset + """ + try: + container.reset(dlpx_obj.server_session, find_obj_by_name( + dlpx_obj.server_session, container, container_name).reference) + except RequestError as e: + print_exception('\nERROR: JS Container was not reset. The ' + 'error was:\n\n{}'.format(e)) + print 'Container {} was reset.\n'.format(container_name) + + + def list_hierarchy(dlpx_obj, container_name): """ Filter container listing. @@ -439,6 +457,8 @@ def main_workflow(engine, dlpx_obj): arguments['--refresh_container']) elif arguments['--list_hierarchy']: list_hierarchy(dlpx_obj, arguments['--list_hierarchy']) + elif arguments['--reset']: + reset_container(dlpx_obj, arguments['--reset']) thingstodo.pop() # get all the jobs, then inspect them i = 0 diff --git a/v1_8_0/lib/GetReferences.py b/v1_8_0/lib/GetReferences.py index 812a8ef..3f18fe9 100644 --- a/v1_8_0/lib/GetReferences.py +++ b/v1_8_0/lib/GetReferences.py @@ -20,7 +20,7 @@ from DxLogging import print_debug from DxLogging import print_exception -VERSION = 'v.0.2.0019' +VERSION = 'v.0.2.0020' def convert_timestamp(engine, timestamp): """ @@ -216,7 +216,6 @@ def find_obj_name(engine, f_class, obj_reference): obj_reference: The object reference to retrieve the name """ try: - print 'engine obj: {}, {}'.format(engine, obj_reference) obj_name = f_class.get(engine, obj_reference) return obj_name.name
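# Illustrative sketch (not part of the patch): the new --reset option resolves
# the container by name and calls container.reset(), which backs out the most
# recent refresh or restore, as reset_container() above does. Assumes an
# established delphixpy v1_8_0 session and lib.GetReferences.find_obj_by_name.
from delphixpy.v1_8_0.web.jetstream import container
from lib.GetReferences import find_obj_by_name

def reset_js_container(server_session, container_name):
    """Undo the last data operation on the named Jet Stream container."""
    container_ref = find_obj_by_name(server_session, container,
                                     container_name).reference
    container.reset(server_session, container_ref)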