diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..f9dc5b9 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,27 @@ +name: CI + +on: + pull_request: + push: + paths-ignore: + - '**.md' + +jobs: + test: + name: Run style checks + runs-on: ubuntu-latest + strategy: + matrix: + python: [3.7] + + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install Tox and any other packages + run: pip install tox + - name: Run tox + run: tox -- -a diff --git a/.gitignore b/.gitignore index 721b3f2..019879a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,12 @@ *.gz -*.log *.swp *.pyc *.key *.pub *.DS_Store -dxtools.conf +*.log +config/dxtools.conf +venv +.DS_Store +**/.idea +.tox/ \ No newline at end of file diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 index 0000000..5c98b42 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,2 @@ +# Default ignored files +/workspace.xml \ No newline at end of file diff --git a/.idea/codeStyles/codeStyleConfig.xml b/.idea/codeStyles/codeStyleConfig.xml new file mode 100644 index 0000000..a55e7a1 --- /dev/null +++ b/.idea/codeStyles/codeStyleConfig.xml @@ -0,0 +1,5 @@ + + + + \ No newline at end of file diff --git a/.idea/delphixpy-examples.iml b/.idea/delphixpy-examples.iml new file mode 100644 index 0000000..0e4e9fa --- /dev/null +++ b/.idea/delphixpy-examples.iml @@ -0,0 +1,10 @@ + + + + + + + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000..105ce2d --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000..3c29c38 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000..cdd9eab --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..94a25f7 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..51f0ab3 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,4 @@ +[settings] +force_single_line=True +known_first_party=delphixpy,lib +default_section=THIRDPARTY \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..1ed0e34 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,12 @@ +repos: +- repo: https://github.com/psf/black + rev: 22.10.0 + hooks: + - id: black + args: + - --fast + +- repo: https://github.com/timothycrosley/isort + rev: 4.3.21 + hooks: + - id: isort diff --git a/.swn b/.swn new file mode 100644 index 0000000..fac3df7 Binary files /dev/null and b/.swn differ diff --git a/README.md b/README.md index d18c5b6..4a2ec3f 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,16 @@ # delphixpy-examples -These are some example python scripts I put together to serve as -examples for those getting started with Delphix the delphixpy python -module. +These are some example python scripts for those getting started +with the Delphix delphixpy python module. 
-## Thanks -First, a lot of thanks to Corey Brune -([@mcbrune](https://github.com/mcbrune)) for all of his contributions -that make this spaghetti look decent. +## Changes in this Branch +- This branch requires Python 3. All enhancements and break fixes will + be committed to this branch and will eventually become the + `master` branch. +- We have a new format for dxtools.conf. If you used this repo in the + past, please be aware of the new format. +- All connections use HTTPS by default. Please refer to + lib/get\_session.py if using this repo in a production environment. +- Migrated from Delphix API version 1.8.0 to 1.10.2. ## Wait... What's Delphix? In the simplest terms, [Delphix](http://www.delphix.com) is an @@ -21,20 +25,74 @@ directly invoke the Delphix API via python. ## Where can I get delphixpy? delphixpy is available on PyPI, so you can install it by invoking pip - pip install delphixpy + pip3 install delphixpy ## How do I use these examples? Clone this repository to your system where Python is installed. Then install the pip packages in the requirements.txt file: - pip install --upgrade -r requirements.txt + pip3 install --upgrade -r requirements.txt Once that is complete, you are ready to use the scripts with your Delphix environment. Each of the scripts has POSIX-compliant help. The options are also explained along with examples. I am going -to explain more on these scripts in my blog and on [the Delphix +to explain more about these scripts at @CloudSurgeon and on [the Delphix community page](https://community.delphix.com) +## Contribute + +1. Fork the project. +2. Make your bug fix or new feature. +3. Add tests for your code. +4. Send a pull request. + +Contributions must be signed as `User Name <email address>`. Make +sure to [set up Git with user name and email +address](https://git-scm.com/book/en/v2/Getting-Started-First-Time-Git-Setup). Bug +fixes should branch from the current stable branch. + +### Formatting + +This repository uses the `tox` and `pre-commit` tools to run +autoformatters on the entire repository. These two tools are the +industry standard for Python. The goal of these formatters is to +delegate formatting concerns to the machine so that developers and +code-reviewers can focus on more important things. + +The two main autoformatters that we use are: + - `black`: General Python formatting + - `isort`: Import sorting + +### Running the formatting + +The formatting is run automatically on every GitHub pull +request and on every push to GitHub. + +It is possible to run these locally in two ways: automatically before +every push, or manually. + +To have the checks run automatically before every push, enable +`pre-commit`:
+ +``` +tox +.tox/format/bin/pre-commit install --hook-type pre-push +``` + +To run the checks manually: +On the entire repository +``` + tox -- --all-files +``` +on a specific file +``` + tox -- --file +``` +On every file in the most recent commit +``` + git diff-tree --no-commit-id --name-only -r HEAD | xargs tox -- --files +``` + ## Something neat worth noting Each of the scripts leverage [docopt](https://github.com/docopt/docopt), which is a great module diff --git a/add_windows_env.py b/add_windows_env.py deleted file mode 100755 index 0081c32..0000000 --- a/add_windows_env.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python -#Adam Bowen Sept 2016 -VERSION="v.0.0.001" -#just a quick and dirty example of adding a windows source - -from delphixpy.delphix_engine import DelphixEngine -from delphixpy.web import environment -from delphixpy.web.vo import HostEnvironmentCreateParameters, EnvironmentUser, PasswordCredential, \ - WindowsHostEnvironment, WindowsHostCreateParameters, WindowsHost - -engine_address = "192.168.2.37" -engine_username = "delphix_admin" -engine_password = "landshark" - -def serversess(f_engine_address, f_engine_username, f_engine_password): - """ - Function to setup the session with the Delphix Engine - """ - server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") - return server_session - -server = serversess(engine_address, engine_username, engine_password) - -envCreateParams = HostEnvironmentCreateParameters() - - -envCreateParams.primary_user = EnvironmentUser() -envCreateParams.primary_user.name = "delphix\delphix_admin" -envCreateParams.primary_user.credential = PasswordCredential() -envCreateParams.primary_user.credential.password = "delphix" -envCreateParams.host_environment = WindowsHostEnvironment() -envCreateParams.host_environment.name = "WINDOWSSOURCE" -envCreateParams.host_environment.proxy = "WINDOWS_HOST-6" #This is the Host ID of the Windows Server that houses the connector -envCreateParams.host_parameters = WindowsHostCreateParameters() -envCreateParams.host_parameters.host = WindowsHost() -envCreateParams.host_parameters.host.address = "WINDOWSSOURCE" - -environment.create(server, envCreateParams) diff --git a/config/dxtools.conf b/config/dxtools.conf new file mode 100644 index 0000000..9f02d3d --- /dev/null +++ b/config/dxtools.conf @@ -0,0 +1,12 @@ +{ + "myve2": [ + { + "ip_address": "18.207.2.95", + "username": "delphix_admin", + "password": "delphix", + "use_https": "True", + "default": "True", + "hostname": "myve2" + } + ] +} diff --git a/delphix_admin_setup.py b/delphix_admin_setup.py deleted file mode 100755 index 2897434..0000000 --- a/delphix_admin_setup.py +++ /dev/null @@ -1,168 +0,0 @@ -#!/usr/bin/env python -''' -Adam Bowen - Jan 2016 -This script configures the delphix_admin user after domain0 is configured -Will come back and properly throw this with logging, etc -''' -VERSION="v.2.3.002" -CONTENTDIR="/u02/app/content" - -import getopt -import logging -from os.path import basename -import signal -import sys -import time -import traceback -import untangle - -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0.web import user -from delphixpy.v1_6_0.web.vo import CredentialUpdateParameters, PasswordCredential, User - - -def serversess(f_engine_address, f_engine_username, f_engine_password): - ''' - Function to grab the server session - ''' - server_session= DelphixEngine(f_engine_address, 
f_engine_username, f_engine_password, "DOMAIN") - return server_session - -def help(): - print("\n" + basename(__file__)+ " [-e ] [-o - Engine must be up, unconfigured, and console screen must be green") - print("-o - will use this password to initially access the system") - print("-p - will set the delphix_admin user to this password") - print("-v - Print version information and exit") - sys.exit(2) - -def logging_est(): - ''' - Establish Logging - ''' - global debug - logging.basicConfig(filename='landshark_setup.log',format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - print_info("Welcome to " + basename(__file__) + ", version " + VERSION) - global logger - debug = True - logger = logging.getLogger() - logger.setLevel(10) - print_info("Debug Logging is enabled.") - -def on_exit(sig, func=None): - print_info("Shutdown Command Received") - print_info("Shutting down prime_setup.py") - sys.exit(0) - -def print_debug(print_obj): - ''' - DEBUG Log-level - ''' - if debug == True: - print "DEBUG: " + str(print_obj) - logging.debug(str(print_obj)) - -def print_error(print_obj): - ''' - ERROR Log-level - ''' - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - -def print_info(print_obj): - ''' - INFO Log-level - ''' - print "INFO: " + str(print_obj) - logging.info(str(print_obj)) - -def print_warning(print_obj): - ''' - WARNING Log-level - ''' - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - -def set_exit_handler(func): - signal.signal(signal.SIGTERM, func) - -def time_elapsed(): - elapsed_minutes = round((time.time() - time_start)/60, +1) - return elapsed_minutes - -def version(): - print("Version: " +VERSION) - logging_est() - set_exit_handler(on_exit) - sys.exit(1) - -def main(argv): - try: - logging_est() - global time_start - time_start = time.time() - engine_ip = "" - engine_pass = "" - old_engine_pass = "" - try: - opts,args = getopt.getopt(argv,"e:o:p:hv") - except getopt.GetoptError: - help() - for opt, arg in opts: - if opt == '-h': - help() - elif opt == '-e': - engine_ip = arg - elif opt == '-o': - old_engine_pass = arg - elif opt == '-p': - engine_pass = arg - elif opt == '-v': - version() - - if (engine_ip == "" or engine_pass == "" or old_engine_pass == "") : - help() - - server = serversess(engine_ip, "delphix_admin", old_engine_pass) - - if user.get(server, "USER-2").email_address == None: - print_debug("Setting delphix_admin's email address") - delphix_admin_user = User() - delphix_admin_user.email_address = "spam@delphix.com" - user.update(server, 'USER-2', delphix_admin_user) - - print_debug("Setting delphix_admin's password") - delphix_admin_credupdate = CredentialUpdateParameters() - delphix_admin_credupdate.new_credential = PasswordCredential() - delphix_admin_credupdate.new_credential.password = engine_pass - user.update_credential(server, 'USER-2', delphix_admin_credupdate) - else: - print_info("The delphix_admin user has already been setup") - - except SystemExit as e: - sys.exit(e) - except HttpError as e: - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) - sys.exit(2) - except JobError as e: - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - except KeyboardInterrupt: - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - 
print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - except: - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/delphix_snapshot_group_will_plugin.py b/delphix_snapshot_group_will_plugin.py deleted file mode 100755 index 89833f2..0000000 --- a/delphix_snapshot_group_will_plugin.py +++ /dev/null @@ -1,31 +0,0 @@ -#!/usr/bin/env python -#For use with HipChat and Will -#https://github.com/skoczen/will - -from will.plugin import WillPlugin -from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import group, database -from delphixpy.v1_6_0 import job_context - -class DelphixSnapshotPlugin(WillPlugin): - - @respond_to("snapshot_group (?P.*)") - def snapshot_group_will(self, message, v_object=None): - group_name = v_object - #database_name = "Employee DB - Dev" - - server_session = DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") - - all_groups = group.get_all(server_session) - - for each in all_groups: - if group_name == each.name: - group_reference = each.reference - break - - database_objs = database.get_all(server_session, group=group_reference) - - with job_context.async(server_session): - for obj in database_objs: - database.sync(server_session, obj.reference) diff --git a/delphix_will_plugin.py b/delphix_will_plugin.py deleted file mode 100755 index ba58e07..0000000 --- a/delphix_will_plugin.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python -#For use with HipChat and Will -#https://github.com/skoczen/will - -from will.plugin import WillPlugin -from will.decorators import respond_to, periodic, hear, randomly, route, rendered_template, require_settings -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import database -import imp, subprocess, shlex - -VERSION=0.001 - - - -class DelphixPlugin(WillPlugin): - - @respond_to("listvdbs") - def list_databases_will(self, message): - foo = imp.load_source('list_all_databases', 'delphixpy-examples/list_all_databases.py') - vdblist="\n".join(each.name for each in foo.all_databases) - will_response = "There are " + str(len(foo.all_databases)) + " databases in the LandsharkEngine\n" + vdblist - self.reply(message, will_response) - - @respond_to("snapshot (?P.*)") - def snapshot_databases_will(self, message, v_object=None): - if " in " not in v_object: - will_response="Please specify group with request. For example:\n \ - snapshot Employee Oracle 11G DB in Sources" - self.reply(message, will_response) - else: - v_object = v_object.split(' in ',1) - vdb_name = v_object[0] - vdb_group = v_object[1] - self.reply(message, "Snapping " + vdb_name + ". 
Will let you know when it is complete.") - p = subprocess.Popen(['python', 'delphixpy-examples/dx_snapshot_db.py', '--group', vdb_group, '--name', \ - vdb_name, '--config', 'delphixpy-examples/dxtools.conf'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - self.reply(message, vdb_name + " Snapshot Complete\n" + p.stdout.read()) - - @respond_to("provision vdb (?P.*)") - def provision_databases_will(self, message, v_object=None): - provision_parameters = shlex.split('python delphixpy-examples/dx_provision_vdb.py --config delphixpy-examples/dxtools.conf ' + v_object) - self.reply(message, str(provision_parameters)) - self.reply(message, "Executing provision job") - p = subprocess.Popen(provision_parameters, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - self.reply(message, "Provision Request Complete\n" + p.stdout.read()) - - @respond_to("delete vdb (?P.*)") - def delete_databases_will(self, message, v_object=None): - if " in " not in v_object: - will_response="Please specify group with request. For example:\n \ - delete Employee Oracle 11G DB in Sources" - self.reply(message, will_response) - else: - v_object = v_object.split(' in ',1) - vdb_name = v_object[0] - vdb_group = v_object[1] - self.reply(message, "Deleting " + vdb_name + ". Will let you know when it is complete.") - p = subprocess.Popen(['python', 'delphixpy-examples/dx_delete_vdb.py', '--group', vdb_group, '--name', \ - vdb_name, '--config', 'delphixpy-examples/dxtools.conf'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - self.reply(message, vdb_name + " Delete Complete\n" + p.stdout.read()) - - @respond_to("refresh vdb (?P.*)") - def refresh_vdbs_will(self, message, v_object=None): - if " in " not in v_object: - will_response="Please specify group with request. For example:\n \ - refresh autoprod in Analytics" - self.reply(message, will_response) - else: - v_object = v_object.split(' in ',1) - vdb_name = v_object[0] - vdb_group = v_object[1] - self.reply(message, "Refreshing " + vdb_name + ". Will let you know when it is complete.") - p = subprocess.Popen(['python', 'delphixpy-examples/dx_refresh_db.py', '--group', vdb_group, '--name', \ - vdb_name, '--config', 'delphixpy-examples/dxtools.conf', '--timestamp', '@2016-10-14T20:55:05.995Z'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - self.reply(message, vdb_name + " Refresh Complete\n" + p.stdout.read()) - - @respond_to("refresh jetstream (?P.*)") - def refresh_jetstream_will(self, message, v_object=None): - if " in " not in v_object: - will_response="Please specify group with request. For example:\n \ - refresh jetstream Sugar Automated Testing Container in Masked SugarCRM Application" - self.reply(message, will_response) - else: - v_object = v_object.split(' in ',1) - container_name = v_object[0] - container_template = v_object[1] - self.reply(message, "Refreshing Jetstream Container: " + container_name + ". Will let you know when it is complete.") - p = subprocess.Popen(['python', 'delphixpy-examples/dx_jetstream_container.py', '--operation', 'refresh', \ - '--template', container_template, '--container', container_name, '--config', 'delphixpy-examples/dxtools.conf'], \ - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - self.reply(message, container_name + " Refresh Complete\n" + p.stdout.read()) - - @respond_to("bonjour") - def say_bonjour_will(self, message): - """bonjour: Landshark parles the Francais!""" - self.reply(message, "bonjour! Je m'appelle Landshark! 
Je suis pret a travailler!") diff --git a/dx_authorization.py b/dx_authorization.py deleted file mode 100755 index d35dbae..0000000 --- a/dx_authorization.py +++ /dev/null @@ -1,447 +0,0 @@ -#!/usr/bin/env python -# Corey Brune - Oct 2016 -# Creates an authorization object -# requirements -# pip install docopt delphixpy - -# The below doc follows the POSIX compliant standards and allows us to use -# this doc to also define our arguments for the script. -"""List, create or remove authorizations for a Virtualization Engine -Usage: - dx_authorization.py (--create --role --target_type --target --user | --list | --delete --role --target_type --target --user ) - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_authorization.py -h | --help | -v | --version -List, delete and create authentication objects - -Examples: - dx_authorization.py --engine landsharkengine --create --role Data --user dev_user --target_type database --target test_vdb - dx_authorization.py --engine landsharkengine --create --role Data --user dev_user --target_type group --target Sources - dx_authorization.py --list - dx_authorization.py --delete --role Data --user dev_user --target_type database --target test_vdb - -Options: - --create Create an authorization - --role Role for authorization. Valid Roles are Data, - Read, Jet Stream User, OWNER, PROVISIONER - --target Target object for authorization - --target_type Target type. Valid target types are snapshot, - group, database - --user User for the authorization - --list List all authorizations - --delete Delete authorization - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_authorization.log] - -h --help Show this screen. - -v --version Show version. 
-""" - -VERSION = 'v.0.0.015' - -from docopt import docopt -from os.path import basename -import sys -from time import sleep, time -import traceback - -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import role -from delphixpy.v1_8_0.web import authorization -from delphixpy.v1_8_0.web import user -from delphixpy.v1_8_0.web import snapshot -from delphixpy.v1_8_0.web import group -from delphixpy.v1_8_0.web.vo import User -from delphixpy.v1_8_0.web.vo import Authorization - -from lib.DlpxException import DlpxException -from lib.GetSession import GetSession -from lib.GetReferences import find_obj_by_name -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug -from lib.DxLogging import print_exception - - -def create_authorization(dlpx_obj, role_name, target_type, target_name, - user_name): - """ - Function to start, stop, enable or disable a VDB - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param role_name: Name of the role - :param target_type: Supports snapshot, group and database target types - :param target_name: Name of the target - :param user_name: User for the authorization - """ - - authorization_obj = Authorization() - print_debug('Searching for {}, {} and {} references.\n'.format( - role_name, target_name, user_name)) - try: - authorization_obj.role = find_obj_by_name(dlpx_obj.server_session, role, - role_name).reference - authorization_obj.target = find_target_type(dlpx_obj, target_type, - target_name).reference - authorization_obj.user = find_obj_by_name(dlpx_obj.server_session, user, - user_name).reference - authorization.create(dlpx_obj.server_session, authorization_obj) - except (RequestError, HttpError, JobError) as e: - print_exception('An error occurred while creating authorization:\n' - '{}'.format(e)) - print 'Authorization successfully created for {}.'.format(user_name) - - -def delete_authorization(dlpx_obj, role_name, target_type, target_name, - user_name): - """ - Function to delete a given authorization - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param role_name: Name of the role - :type role_name: basestring - :param target_type: Supports snapshot, group and database target types - :type target_type basestring - :param target_name: Name of the target - :type target_name: basestring - :param user_name: User for the authorization - :type user_name: basestring - """ - target_obj = find_target_type(dlpx_obj, target_type, target_name) - user_obj = find_obj_by_name(dlpx_obj.server_session, user, - user_name) - role_obj = find_obj_by_name(dlpx_obj.server_session, role, - role_name) - auth_objs = authorization.get_all(dlpx_obj.server_session) - - try: - - del_auth_str = '({}, {}, {})'.format(user_obj.reference, - role_obj.reference, - target_obj.reference) - for auth_obj in auth_objs: - if auth_obj.name == del_auth_str: - authorization.delete(dlpx_obj.server_session, - auth_obj.reference) - except DlpxException as e: - print_exception('ERROR: Could not delete authorization:\n{}'.format(e)) - print '{} for user {} was deleted successfully'.format(target_name, - user_name) - - -def find_target_type(dlpx_obj, target_type, target_name): - """ - Function to find the target authorization - - 
:param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param target_type: Type of target for authorization - :param target_name: Name of the target - """ - - target_obj = None - try: - if target_type.lower() == 'group': - target_obj = find_obj_by_name(dlpx_obj.server_session, group, - target_name) - elif target_type.lower() == 'database': - target_obj = find_obj_by_name(dlpx_obj.server_session, database, - target_name) - elif target_type.lower() == 'snapshot': - target_obj = find_obj_by_name(dlpx_obj.server_session, snapshot, - target_name) - except (DlpxException, RequestError, HttpError) as e: - print_exception('Could not find authorization target type ' - '{}:\n{}'.format(target_type, e)) - return target_obj - - -def list_authorization(dlpx_obj): - """ - Function to list authorizations for a given engine - - :param dlpx_obj: Virtualization Engine session object - """ - target_obj = None - - try: - auth_objs = authorization.get_all(dlpx_obj.server_session) - print_info('User, Role, Target, Reference') - for auth_obj in auth_objs: - role_obj = role.get(dlpx_obj.server_session, auth_obj.role) - user_obj = user.get(dlpx_obj.server_session, auth_obj.user) - if auth_obj.target.startswith('USER'): - target_obj = user.get(dlpx_obj.server_session, auth_obj.target) - elif auth_obj.target.startswith('GROUP'): - target_obj = group.get(dlpx_obj.server_session, auth_obj.target) - elif auth_obj.target.startswith('DOMAIN'): - target_obj = User() - target_obj.name = 'DOMAIN' - print '{}, {}, {}, {}'.format(user_obj.name, role_obj.name, - target_obj.name, - auth_obj.reference) - except (RequestError, HttpError, JobError, AttributeError) as e: - print_exception('An error occurred while listing authorizations.:\n' - '{}\n'.format((e))) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine, dlpx_obj): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - :type engine: dict - dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - - """ - - try: - # Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - except DlpxException as e: - print_exception('ERROR: js_bookmark encountered an error authenticating' - ' to {} {}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - thingstodo = ["thingtodo"] - try: - with dlpx_obj.job_mode(single_thread): - while (len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0): - if len(thingstodo) > 0: - if arguments['--create']: - create_authorization(dlpx_obj, arguments['--role'], - arguments['--target_type'], - arguments['--target'], - arguments['--user']) - elif arguments['--delete']: - delete_authorization(dlpx_obj, arguments['--role'], - arguments['--target_type'], - arguments['--target'], - arguments['--user']) - elif arguments['--list']: - list_authorization(dlpx_obj) - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - job_obj = job.get(dlpx_obj.server_session, - dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: : {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - except (DlpxException, RequestError, JobError, HttpError) as e: - print_exception('\nError in dx_authorization: {}\n{}'.format( - engine['hostname'], e)) - sys.exit(1) - - -def run_job(dlpx_obj, config_file_path): - """ - This function runs the main_workflow aynchronously against all the - servers specified - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param config_file_path: string containing path to configuration file. - :type config_file_path: str - """ - - # Create an empty list to store threads we create. - threads = [] - engine = None - - # If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info('Executing against all Delphix Engines in the dxtools.conf') - try: - # For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - # Create a new thread and add it to the list. - threads.append(main_workflow(engine, dlpx_obj)) - except DlpxException as e: - print_exception('Error encountered in run_job():\n{}'.format(e)) - sys.exit(1) - - elif arguments['--all'] is False: - # Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dlpx_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - except (DlpxException, RequestError, KeyError): - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value and' - ' try again. 
Exiting.\n'.format( - arguments['--engine'], config_file_path)) - else: - # Else search for a default engine in the dxtools.conf - for delphix_engine in dlpx_obj.dlpx_engines: - if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': - engine = dlpx_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine is None: - raise DlpxException('\nERROR: No default engine found. Exiting') - - # run the job against the engine - threads.append(main_workflow(engine, dlpx_obj)) - - # For each thread in the list... - for each in threads: - # join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(time_start): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - - :param time_start: float containing start time of the script. - """ - return round((time() - time_start)/60, +1) - - -def main(): - # We want to be able to call on these variables anywhere in the script. - global single_thread - global debug - - time_start = time() - single_thread = False - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - config_file_path = arguments['--config'] - # Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - # This is the function that will handle processing main_workflow for - # all the servers. - run_job(dx_session_obj, config_file_path) - - elapsed_minutes = time_elapsed(time_start) - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - # Here we handle what we do when the unexpected happens - except SystemExit as e: - # This is what we use to handle our sys.exit(#) - sys.exit(e) - - except DlpxException as e: - # We use this exception handler when an error occurs in a function call. - print_exception('ERROR: Please check the ERROR message below:\n' - '{}'.format(e.message)) - sys.exit(2) - - except HttpError as e: - # We use this exception handler when our connection to Delphix fails - print_exception('ERROR: Connection failed to the Delphix Engine. 
Please' 'check the ERROR message below:\n{}'.format(e.message)) - sys.exit(2) - - except JobError as e: - # We use this exception handler when a job fails in Delphix so that we - # have actionable data - print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) - elapsed_minutes = time_elapsed(time_start) - print_exception('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(3) - - except KeyboardInterrupt: - # We use this exception handler to gracefully handle ctrl+c exits - print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed(time_start) - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - except: - # Everything else gets caught here - print_exception('{}\n{}'.format(sys.exc_info()[0], - traceback.format_exc())) - elapsed_minutes = time_elapsed(time_start) - print_info("{} took {:.2f} minutes to get this far".format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - - -if __name__ == "__main__": - # Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - # Feed our arguments to the main function, and off we go! - main() \ No newline at end of file diff --git a/dx_delete_vdb.py b/dx_delete_vdb.py index ca2d242..37c3ca2 100755 --- a/dx_delete_vdb.py +++ b/dx_delete_vdb.py @@ -1,573 +1,236 @@ -#!/usr/bin/env python -#Adam Bowen - Apr 2016 -#This script deletes a vdb -#requirements -#pip install docopt delphixpy - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. This thing is brilliant. -"""Delete a VDB - +#!/usr/bin/env python3 +# Adam Bowen - Apr 2016 +# This script deletes a VDB +# Updated by Corey Brune Oct 2016 +# requirements +# pip install --upgrade setuptools pip docopt delphixpy + +# The below doc follows the POSIX compliant standards and allows us to use +# this doc to also define our ARGUMENTS for the script. +"""Delete a VDB Usage: - dx_delete_db.py (--group [--name ] | --all_dbs ) - [-d | --engine | --all] - [--usebackup] [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_delete_db.py (--host [--group ] [--object_type ] - | --object_type [--group ] [--host ] ) - [-d | --engine | --all] - [--usebackup] [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_delete_db.py -h | --help | -v | --version - -Delete a VDB - + dx_delete_vdb.py --vdb + [--engine ] + [--group_name ][--force][--debug] + [--poll ] [--single_thread ] + [--config ] [--logdir ] + dx_delete_vdb.py -h | --help | -v | --version + +Delete a Delphix VDB Examples: - dx_delete_db.py --group "Sources" --object_type dsource --usebackup - dx_delete_db.py --name "Employee Oracle 11G DB" - dx_delete_db.py --host LINUXSOURCE --parallel 2 --usebackup - dx_delete_db.py --host LINUXSOURCE --parallel 4 --usebackup --debug -d landsharkengine - - - + dx_delete_vdb.py --vdb aseTest + dx_delete_vdb.py --vdb aseTest --engine myengine --single_thread False --force Options: - -d Identifier of Delphix engine in dxtools.conf. - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --all_dbs Run against all database objects - --name Name of object in Delphix to execute against. - --group Name of group in Delphix to execute against. - --host Name of environment in Delphix to execute against. - --object_type dsource or vdb. 
- --usebackup Snapshot using "Most Recent backup". - Available for MSSQL and ASE only. + --vdb Name of the VDB to delete. + --group_name Name of the group to execute against. + --single_thread Run as a single thread. Use True if there are + multiple engines and the operation needs to run + in parallel. + [default: False] + --engine Identifier for Delphix engine in dxtools.conf. + [default: default] --debug Enable debug logging --parallel Limit number of jobs to maxjob --poll The number of seconds to wait between job polls [default: 10] --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_snapshot_db.log] + [default: ./config/dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./logs/dx_delete_vdb.log] + --force Force delete -h --help Show this screen. -v --version Show version. - """ -VERSION="v.0.0.001" - - -from docopt import docopt -import logging -from os.path import basename -import signal import sys import time -import traceback -import json - -from multiprocessing import Process -from time import sleep, time - -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0 import job_context -from delphixpy.v1_6_0.web import database, environment, group, job, source, user -from delphixpy.v1_6_0.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters - - -def find_obj_by_name(engine, server, f_class, obj_name): - """ - Function to find objects by name and object class, and return object's reference as a string - You might use this function to find objects like groups. - """ - print_debug(engine["hostname"] + ": Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"") - obj_ref = '' - - all_objs = f_class.get_all(server) - for obj in all_objs: - if obj.name == obj_name: - print_debug(engine["hostname"] + ": Found a match " + str(obj.reference)) - return obj - -def find_all_databases_by_group_name(engine, server, group_name, exclude_js_container=False): - """ - Easy way to quickly find databases by group name - """ - - #First search groups for the name specified and return its reference - group_obj = find_obj_by_name(engine, server, group, group_name) - if group_obj: - databases=database.get_all(server, group=group_obj.reference, no_js_container_data_source=exclude_js_container) - return databases - -def find_database_by_name_and_group_name(engine, server, group_name, database_name): - - databases = find_all_databases_by_group_name(engine, server, group_name) - - for each in databases: - if each.name == database_name: - print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) - return each - print_info("Unable to find \"" + database_name + "\" in " + group_name) - -def find_source_by_database(engine, server, database_obj): - #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. - source_obj = source.get_all(server, database=database_obj.reference) - #We'll just do a little sanity check here to ensure we only have a 1:1 result. - if len(source_obj) == 0: - print_error(engine["hostname"] + ": Did not find a source for " + database_obj.name + ". Exiting") - sys.exit(1) - elif len(source_obj) > 1: - print_error(engine["hostname"] + ": More than one source returned for " + database_obj.name + ". 
Exiting") - print_error(source_obj) - sys.exit(1) - return source_obj - -def get_config(config_file_path): - """ - This function reads in the dxtools.conf file - """ - #First test to see that the file is there and we can open it - try: - config_file = open(config_file_path).read() - except: - print_error("Was unable to open " + config_file_path + ". Please check the path and permissions, then try again.") - sys.exit(1) - #Now parse the file contents as json and turn them into a python dictionary, throw an error if it isn't proper json - try: - config = json.loads(config_file) - except: - print_error("Was unable to read " + config_file_path + " as json. Please check file in a json formatter and try again.") - sys.exit(1) - #Create a dictionary of engines (removing the data node from the dxtools.json, for easier parsing) - delphix_engines = {} - for each in config['data']: - delphix_engines[each['hostname']] = each - print_debug(delphix_engines) - return delphix_engines - -def logging_est(logfile_path): - """ - Establish Logging - """ - global debug - logging.basicConfig(filename=logfile_path,format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - print_info("Welcome to " + basename(__file__) + ", version " + VERSION) - global logger - debug = arguments['--debug'] - logger = logging.getLogger() - if debug == True: - logger.setLevel(10) - print_info("Debug Logging is enabled.") - -def job_mode(server): - """ - This function tells Delphix how to execute jobs, based on the single_thread variable at the beginning of the file - """ - #Synchronously (one at a time) - if single_thread == True: - job_m = job_context.sync(server) - print_debug("These jobs will be executed synchronously") - #Or asynchronously - else: - job_m = job_context.async(server) - print_debug("These jobs will be executed asynchronously") - return job_m - -def job_wait(): - """ - This job stops all work in the thread/process until jobs are completed. 
- """ - #Grab all the jos on the server (the last 25, be default) - all_jobs = job.get_all(server) - #For each job in the list, check to see if it is running (not ended) - for jobobj in all_jobs: - if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): - print_debug("Waiting for " + jobobj.reference + " (currently: " + jobobj.job_state+ ") to finish running against the container") - #If so, wait - job_context.wait(server,jobobj.reference) - -def on_exit(sig, func=None): - """ - This function helps us end cleanly and with exit codes - """ - print_info("Shutdown Command Received") - print_info("Shutting down " + basename(__file__)) - sys.exit(0) - -def print_debug(print_obj): - """ - Call this function with a log message to prefix the message with DEBUG - """ - try: - if debug == True: - print "DEBUG: " + str(print_obj) - logging.debug(str(print_obj)) - except: - pass - -def print_error(print_obj): - """ - Call this function with a log message to prefix the message with ERROR - """ - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - -def print_info(print_obj): - """ - Call this function with a log message to prefix the message with INFO - """ - print "INFO: " + str(print_obj) - logging.info(str(print_obj)) - -def print_warning(print_obj): - """ - Call this function with a log message to prefix the message with WARNING - """ - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - -def serversess(f_engine_address, f_engine_username, f_engine_password): - """ - Function to setup the session with the Delphix Engine - """ - server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") - return server_session - -def set_exit_handler(func): - """ - This function helps us set the correct exit code - """ - signal.signal(signal.SIGTERM, func) - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... 
- t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps +from os.path import basename - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl +import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async + +VERSION = "v.0.3.002" + + +def delete_vdb(dlpx_obj, vdb_name, force_delete): + vdb_list = vdb_name.split(":") + for vdb in vdb_list: + dx_logging.print_info(f"delete_vdb for: {vdb}") + container_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, vdb + ) + # Check to make sure our container object has a reference + source_obj = get_references.find_source_by_db_name(dlpx_obj.server_session, vdb) + if container_obj.reference: + try: + if source_obj.virtual is not True or source_obj.staging is True: + raise dlpx_exceptions.DlpxException( + f"ERROR: {container_obj.name} is not a virtual object" + ) + else: + dx_logging.print_info( + f"INFO: Deleting {container_obj.name} on engine {dlpx_obj.server_session.address}" + ) + delete_params = None + if force_delete and str(container_obj.reference).startswith( + "MSSQL" + ): + delete_params = vo.DeleteParameters() + delete_params.force = True + try: + dx_logging.print_info(f"triggering delete for: {vdb}") + database.delete( + dlpx_obj.server_session, + container_obj.reference, + delete_params, + ) + dx_logging.print_info(f"delete for: {vdb} is running") + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + raise dlpx_exceptions.DlpxException(f"{err}") + # This exception is raised if refreshing a vFiles VDB since + # AppDataContainer does not have virtual, staging or enabled attributes + except AttributeError as err: + dx_logging.print_exception( + f"ERROR: Deleting {container_obj.name} on engine {dlpx_obj.server_session.address}" + ) + dx_logging.print_exception(f"AttributeError:{err}") + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception(f"DlpxException:{err}") + except Exception as err: + dx_logging.print_exception(f"Exception:\n{err}") + dx_logging.print_info(f" Delete operation has been initiated for vdbs ") - return async_func @run_async -def main_workflow(engine): +def main_workflow(engine, dlpx_obj, single_thread): """ - This function is where the main workflow resides. + This function is where we create our main workflow. Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool """ - - #Pull out the values from the dictionary for this engine - engine_address = engine["ip_address"] - engine_username = engine["username"] - engine_password = engine["password"] - #Establish these variables as empty for use later - databases = [] - environment_obj = None - source_objs = None - jobs = {} - - - #Setup the connection to the Delphix Engine - server = serversess(engine_address, engine_username, engine_password) - - #If an environment/server was specified - if host_name: - print_debug(engine["hostname"] + ": Getting environment for " + host_name) - #Get the environment object by the hostname - environment_obj = find_obj_by_name(engine, server, environment, host_name) - if environment_obj != None: - #Get all the sources running on the server - env_source_objs = source.get_all(server, environment=environment_obj.reference) - #If the server doesn't have any objects, exit. - if env_source_objs == None: - print_error(host_name + "does not have any objects. Exiting") - sys.exit(1) - #If we are only filtering by the server, then put those objects in the main list for processing - if not(arguments['--group'] and database_name): - source_objs = env_source_objs - all_dbs = database.get_all(server, no_js_container_data_source=False) - databases = [] - for source_obj in source_objs: - if source_obj.staging == False and source_obj.virtual == True: - database_obj = database.get(server, source_obj.container) - if database_obj in all_dbs: - databases.append(database_obj) - else: - print_error(engine["hostname"] + ":No environment found for " + host_name + ". Exiting") - sys.exit(1) - #If we specified a specific database by name.... - if arguments['--name']: - #Get the database object from the name - database_obj = find_database_by_name_and_group_name(engine, server, arguments['--group'], arguments['--name']) - if database_obj: - databases.append(database_obj) - #Else if we specified a group to filter by.... - elif arguments['--group']: - print_debug(engine["hostname"] + ":Getting databases in group " + arguments['--group']) - #Get all the database objects in a group. - databases = find_all_databases_by_group_name(engine, server, arguments['--group']) - #Else, if we said all vdbs ... 
- elif arguments['--all_dbs'] and not arguments['--host'] : - #Grab all databases - databases = database.get_all(server, no_js_container_data_source=False) - elif arguments['--object_type'] and not arguments['--host'] : - databases = database.get_all(server) - if not databases or len(databases) == 0: - print_error("No databases found with the criterion specified") + try: + # Setup the connection to the Delphix DDP + dx_logging.print_info(f"Executing main_workflow") + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: dx_delete_vdb encountered an error authenticating to " + f'{engine["ip_address"]}:{err}' + ) + dx_logging.print_exception( + f"Cannot continue operation on " f'{engine["ip_address"]}' + ) return - #reset the running job count before we begin - i = 0 - with job_mode(server): - #While there are still running jobs or databases still to process.... - while (len(jobs) > 0 or len(databases) > 0): - #While there are databases still to process and we are still under - #the max simultaneous jobs threshold (if specified) - while len(databases) > 0 and (arguments['--parallel'] == None or i < int(arguments['--parallel'])): - #Give us the next database in the list, and remove it from the list - database_obj = databases.pop() - #Get the source of the database. - #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. - source_obj = find_source_by_database(engine, server, database_obj) - #If we applied the environment/server filter AND group filter, find the intersecting matches - if environment_obj != None and (arguments['--group']): - match = False - for env_source_obj in env_source_objs: - if source_obj[0].reference in env_source_obj.reference: - match = True - break - if match == False: - print_error(engine["hostname"] + ": " + database_obj.name + " does not exist on " + host_name + ". Exiting") - return - #Snapshot the database - delete_job = delete_database(engine, server, jobs, source_obj[0], database_obj, arguments['--object_type']) - #If delete_job has any value, then we know that a job was initiated. - if delete_job: - #increment the running job count - i += 1 - #Check to see if we are running at max parallel processes, and report if so. - if ( arguments['--parallel'] != None and i >= int(arguments['--parallel'])): - print_info(engine["hostname"] + ": Max jobs reached (" + str(i) + ")") - #reset the running jobs counter, as we are about to update the count from the jobs report. - i = update_jobs_dictionary(engine, server, jobs) - print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + str(len(databases)) + " jobs waiting to run") - #If we have running jobs, pause before repeating the checks. - if len(jobs) > 0: - sleep(float(arguments['--poll'])) - -def run_job(engine): - """ - This function runs the main_workflow aynchronously against all the servers specified - """ - #Create an empty list to store threads we create. - threads = [] - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - #For each server in the dxtools.conf... - for delphix_engine in dxtools_objects: - engine = dxtools_objects[delphix_engine] - #Create a new thread and add it to the list. 
- threads.append(main_workflow(engine)) - else: - #Else if the --engine argument was given, test to see if the engine exists in dxtools.conf - if arguments['--engine']: - try: - engine = dxtools_objects[arguments['--engine']] - print_info("Executing against Delphix Engine: " + arguments['--engine']) - except: - print_error("Delphix Engine \"" + arguments['--engine'] + "\" cannot be found in " + config_file_path) - print_error("Please check your value and try again. Exiting") - sys.exit(1) - #Else if the -d argument was given, test to see if the engine exists in dxtools.conf - elif arguments['-d']: - try: - engine = dxtools_objects[arguments['-d']] - print_info("Executing against Delphix Engine: " + arguments['-d']) - except: - print_error("Delphix Engine \"" + arguments['-d'] + "\" cannot be found in " + config_file_path) - print_error("Please check your value and try again. Exiting") - sys.exit(1) - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dxtools_objects: - if dxtools_objects[delphix_engine]['default'] == 'true': - engine = dxtools_objects[delphix_engine] - print_info("Executing against the default Delphix Engine in the dxtools.conf: " + dxtools_objects[delphix_engine]['hostname']) - break - if engine == None: - print_error("No default engine found. Exiting") - sys.exit(1) - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete before moving on - each.join() - -def delete_database(engine, server, jobs, source_obj, container_obj, obj_type=None): - """ - This function - FYI - Snapshot is also called sync - """ - #Sanity check to make sure our source object has a reference - if source_obj.reference != None : - #If we specified the --object_type flag, ensure this source is a match. Skip, if not. - if obj_type != None and ((obj_type.lower() == "vdb" and source_obj.virtual != True ) or (obj_type.lower() == "dsource" and source_obj.virtual != False )): - print_warning(engine["hostname"] + ": " + container_obj.name + " is not a " + obj_type.lower() + ". Skipping sync") - #Ensure this source is not a staging database. We can't act upon those. - elif source_obj.staging == True: - print_warning(engine["hostname"] + ": " + container_obj.name + " is a staging database. Skipping.") - #Ensure the source is enabled. We can't snapshot disabled databases. - else: - print_info(engine["hostname"] + ": Deleting " + container_obj.name ) - print_debug(engine["hostname"] + ": Type: " + source_obj.type ) - print_debug(engine["hostname"] + ": " +source_obj.type) - #Delete it - database.delete(server, container_obj.reference) - #Add the job into the jobs dictionary so we can track its progress - jobs[container_obj] = server.last_job - #return the job object to the calling statement so that we can tell if a job was created or not (will return None, if no job) - return server.last_job - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - -def update_jobs_dictionary(engine, server, jobs): - """ - This function checks each job in the dictionary and updates its status or removes it if the job is complete. - Return the number of jobs still running. 
-    """
-    #Establish the running jobs counter, as we are about to update the count from the jobs report.
-    i = 0
-    #get all the jobs, then inspect them
-    for j in jobs.keys():
-        job_obj = job.get(server, jobs[j])
-        print_debug(engine["hostname"] + ": " + str(job_obj))
-        print_info(engine["hostname"] + ": " + j.name + ": " + job_obj.job_state)
-
-        if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]:
-            #If the job is in a non-running state, remove it from the running jobs list.
-            del jobs[j]
-        else:
-            #If the job is in a running state, increment the running job count.
-            i += 1
-    return i
-
-def main(argv):
-    #We want to be able to call on these variables anywhere in the script.
-    global single_thread
-    global usebackup
-    global time_start
-    global host_name
-    global database_name
-    global config_file_path
-    global dxtools_objects
-
-
-    try:
-        #Declare globals that will be used throughout the script.
-        logging_est(arguments['--logdir'])
-        print_debug(arguments)
-        time_start = time()
-        engine = None
-        single_thread = False
-        usebackup = arguments['--usebackup']
-        database_name = arguments['--name']
-        host_name = arguments['--host']
-        config_file_path = arguments['--config']
-        #Parse the dxtools.conf and put it into a dictionary
-        dxtools_objects = get_config(config_file_path)
-
-        #This is the function that will handle processing main_workflow for all the servers.
-        run_job(engine)
-
-        elapsed_minutes = time_elapsed()
-        print_info("script took " + str(elapsed_minutes) + " minutes to get this far.")
-
+        with dlpx_obj.job_mode(single_thread):
+            delete_vdb(dlpx_obj, ARGUMENTS["--vdb"], ARGUMENTS["--force"])
+            # locking threads
+            run_job.track_running_jobs(engine, dlpx_obj, 5)
+    except (
+        dlpx_exceptions.DlpxException,
+        dlpx_exceptions.DlpxObjectNotFound,
+        exceptions.RequestError,
+        exceptions.JobError,
+        exceptions.HttpError,
+    ) as err:
+        dx_logging.print_exception(
+            f'Error in dx_delete_vdb on Delphix Engine: * {engine["ip_address"]} * : {err}'
+        )
+
+
+def main():
+    """
+    main function - creates session and runs jobs
+    """
+    time_start = time.time()
+    try:
+        dx_session_obj = get_session.GetSession()
+        dx_logging.logging_est(ARGUMENTS["--logdir"])
+        config_file_path = ARGUMENTS["--config"]
+        single_thread = ARGUMENTS["--single_thread"]
+        engine = ARGUMENTS["--engine"]
+        dx_session_obj.get_config(config_file_path)
+        # This is the function that will handle processing main_workflow for all the servers.
+        t = run_job.run_job_mt(main_workflow, dx_session_obj, engine, single_thread)
+        dx_logging.print_info(
+            f"delete operation(s) are in progress. Process will terminate once all operations are complete."
+        )
+        for each in t:
+            # join them back together so that we wait for all threads to complete
+            each.join()
+        elapsed_minutes = run_job.time_elapsed(time_start)
+        dx_logging.print_info(
+            f"delete operation took {elapsed_minutes} minutes to complete."
+        )
+    # Here we handle what we do when the unexpected happens
+    except SystemExit as err:
+        # This is what we use to handle our sys.exit(#)
+        sys.exit(err)
+
+    except dlpx_exceptions.DlpxException as err:
+        # We use this exception handler when an error occurs in a function
+        # call.
+ dx_logging.print_exception( + f"ERROR: Please check the ERROR message below:\n {err.error}" + ) + sys.exit(2) - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check the ERROR message below:\n{err.status}" + ) sys.exit(2) - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that we have actionable data - """ - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." + f"{basename(__file__)} took {elapsed_minutes} minutes to complete" + ) sys.exit(3) + except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") - except: - """ - Everything else gets caught here - """ - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(1) + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} minutes to complete." + ) + if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #I added this below condition to account for my --name | or AT LEAST ONE OF --group --host --object_type - #I couldn't quite sort it out with docopt. Maybe I'm just dense today. - #Anyway, if none of the four options are given, print the __doc__ and exit. - if not(arguments['--name']) and not(arguments['--group']) and not(arguments['--host']) and not(arguments['--object_type']) and not(arguments['--all_dbs']): - print(__doc__) - sys.exit() - #Feed our arguments to the main function, and off we go! - print arguments - main(arguments) \ No newline at end of file + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! 
+ main() diff --git a/dx_environment.py b/dx_environment.py index c7f3855..7e27a9d 100755 --- a/dx_environment.py +++ b/dx_environment.py @@ -1,691 +1,550 @@ -#!/usr/bin/env python -#Corey Brune 08 2016 -#This script creates an environment -#requirements -#pip install docopt delphixpy - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. +#!/usr/bin/env python3 +# Corey Brune 08 2016 +# This script creates an environment in Delphix +# The below doc follows the POSIX compliant standards and allows us to +# define our ARGUMENTS for the script. """Create Host Environment Usage: - dx_environment.py (--type --env_name --host_user \ ---ip
[--toolkit ] [--ase --ase_user --ase_pw ] \ -|--update_ase_pw --env_name | --update_ase_user --env_name \ -| --delete | --refresh | --list) -[--logdir ][--debug] [--config ] [--connector_name ] -[--pw ][--engine ][--all] [--poll ] - dx_environment.py (--update_host --old_host_address --new_host_address ) [--logdir ][--debug] [--config ] - dx_environment.py ([--enable]|[--disable]) --env_name [--logdir ][--debug] [--config ] - dx_environment.py -h | --help | -v | --version - -Create a Delphix environment. (current support for standalone environments only) + dx_environment.py ( [--list] | [--create] | [--enable] | [--disable] | [--delete] | [--refresh] | [--update_host]) + [--os_type ] + [--host_type ] + [--toolkit ] + [--env_name ] + [--passwd ] + [--connector_host_name ] + [--ip ] + [--old_host_address ] + [--new_host_address ] + [--single_thread ] + [--host_user ] + [--logdir ] + [--config ] + [--engine ] + [--poll ] + dx_environment.py -h | --help | -v | --version + +Create a Delphix environment. +- current support for standalone environments only +- does not support password updates. Examples: - dx_environment.py --engine landsharkengine --type linux --env_name test1 --host_user delphix --pw delphix --ip 182.1.1.1 --toolkit /var/opt/delphix - dx_environment.py --type linux --env_name test1 --update_ase_pw newPasswd - dx_environment.py --type linux --env_name test1 --host_user delphix --pw delphix --ip 182.1.1.1 --toolkit /var/opt/delphix - dx_environment.py --update_host --host_name 10.0.3.60 - dx_environment.py --type linux --env_name test1 --host_user delphix --pw delphix --ip 182.1.1.1 --toolkit /var/opt/delphix --ase --ase_user sa --ase_pw delphixpw - dx_environment.py --type windows --env_name SOURCE --host_user delphix.local\\administrator --ip 10.0.1.50 --toolkit foo --config dxtools.conf --pw 'myTempPassword123!' --debug --connector_name 10.0.1.60 - dx_environment.py --enable --env_name SOURCE - dx_environment.py --disable --env_name SOURCE dx_environment.py --list + dx_environment.py --create --engine mymask --os_type Linux --env_name oratgt \ + --host_user delphix --passwd xxxx --ip 10.0.1.30 --toolkit /home/delphix + dx_environment.py --create --engine mymask --os_type Windows --env_name wintgt \ + --host_user delphix\dephix_trgt --passwd xxxx --ip 10.0.1.60 --connector_host_name wintgt + dx_environment.py --create --os_type Windows --env_name winsrc \ + --host_user delphix\delphix_src --passwd delphix --ip 10.0.1.50 --connector_host_name wintgt + dx_environment.py --enable --engine mymask --env_name oratgt + dx_environment.py --disable --engine mymask --env_name oratgt + dx_environment.py --refresh --engine mymask --env_name oratgt + dx_environment.py --delete --engine mymask --env_name oratgt + dx_environment.py --update_host --engine mymask --old_host_address 10.0.1.20 --new_host_address 10.0.1.30 + + Options: - --type The OS type for the environment - --env_name The name of the Delphix environment - --ip The IP address of the Delphix environment - --list List all of the environments for a given engine - --toolkit Path of the toolkit. Required for Unix/Linux - --host_user The username on the Delphix environment - --delete The name of the Delphix environment to delete - --update_ase_pw The new ASE DB password - --refresh The name of the Delphix environment to refresh. Specify "all" to refresh all environments - --pw Password of the user - --connector_name The name of the Delphix connector to use. 
Required for Windows source environments - --update_ase_user Update the ASE DB username - --ase Flag to enable ASE environments - --ase_user The ASE DB username - --ase_pw Password of the ASE DB user - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --engine Identifier of Delphix engine in dxtools.conf. - - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_environment.log] - -h --help Show this screen. - -v --version Show version. - --update_host Update the host address for an environment - --old_host_address The current name of the host, as registered in Delphix. Required for update_host - --new_host_address The desired name of the host, as registered in Delphix. Required for update_host - --enable Enable the named environment - --disable Disable the named environment + --os_type The OS type for the environment + --env_name The name of the Delphix environment + --toolkit Path of the toolkit. Required for Unix/Linux + --connector_host_name The name of the Delphix connector Host to use. + Required for Windows source environments + --ip The IP address of the Delphix environment + --host_user The username on the Delphix environment + --passwd Password of the user + --ase_user The ASE DB username + --ase_pw Password of the ASE DB user + --update_ase_user Update the ASE DB username + --update_ase_pw The new ASE DB password + --old_host_address The current name of the host, as registered in + Delphix. Required for update + --new_host_address The desired name of the host, as registered in + Delphix. Required for update + --engine Identifier of Delphix engine in dxtools.conf. + [default: default] + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./config/dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./logs/dx_environment.log] + -h --help Show this screen. + -v --version Show version. + --single_thread Run as a single thread? 
True or False
+                              [default: False]
 """
-VERSION="v.0.3.612"
-
-from docopt import docopt
-from os.path import basename
 import sys
-import traceback
-from time import sleep, time
-
-from delphixpy.v1_8_0.exceptions import HttpError
-from delphixpy.v1_8_0.exceptions import JobError
-from delphixpy.v1_8_0.exceptions import RequestError
-from delphixpy.v1_8_0.web import environment
-from delphixpy.v1_8_0.web import job
-from delphixpy.v1_8_0.web import host
-from delphixpy.v1_8_0.web.vo import UnixHostEnvironment
-from delphixpy.v1_8_0.web.vo import ASEHostEnvironmentParameters
-from delphixpy.v1_8_0.web.vo import HostEnvironmentCreateParameters
-from delphixpy.v1_8_0.web.vo import WindowsHostEnvironment
-from delphixpy.v1_8_0.web.vo import WindowsHost
-from delphixpy.v1_8_0.web.vo import UnixHost
-
-from lib.DlpxException import DlpxException
-from lib.GetSession import GetSession
-from lib.GetReferences import find_obj_by_name
-from lib.GetReferences import find_obj_name
-from lib.GetReferences import find_all_objects
-from lib.DxLogging import logging_est
-from lib.DxLogging import print_info
-from lib.DxLogging import print_debug
-from lib.DxLogging import print_exception
+import time
+from os.path import basename
+
+import docopt
+
+from delphixpy.v1_10_2 import exceptions
+from delphixpy.v1_10_2.web import environment
+from delphixpy.v1_10_2.web import host
+from delphixpy.v1_10_2.web import vo
+from lib import dlpx_exceptions
+from lib import dx_logging
+from lib import get_references
+from lib import get_session
+from lib import run_job
+from lib.run_async import run_async
+
+VERSION = "v.0.3.616"


 def enable_environment(dlpx_obj, env_name):
     """
     Enable the given host
+    :param dlpx_obj: DDP session object
+    :type dlpx_obj: lib.GetSession.GetSession object
+    :param env_name: Environment name in Delphix
+    :type env_name: str
     """
-    engine_name = dlpx_obj.dlpx_engines.keys()[0]
-
-    env_obj = find_obj_by_name(dlpx_obj.server_session,
-                               environment, env_name)
-
+    env_obj = get_references.find_obj_by_name(
+        dlpx_obj.server_session, environment, env_name
+    )
     try:
-        environment.enable(dlpx_obj.server_session,env_obj.reference)
-        print('Attempting to enable {}'.format(env_name))
-    except (DlpxException, RequestError) as e:
-        print_exception('\nERROR: Enabling the host {} '
-                        'encountered an error:\n{}'.format(env_name, e))
-        sys.exit(1)
+        environment.enable(dlpx_obj.server_session, env_obj.reference)
+    except (dlpx_exceptions.DlpxException, exceptions.RequestError) as err:
+        dx_logging.print_exception(
+            f"ERROR: Enabling the host {env_name} " f"encountered an error:\n{err}"
+        )


-def disable_environment(dlpx_obj,env_name):
+def disable_environment(dlpx_obj, env_name):
     """
-    Enable the given host
+    Disable a Delphix environment
+    :param dlpx_obj: DDP session object
+    :type dlpx_obj: lib.GetSession.GetSession object
+    :param env_name: Environment name in Delphix
+    :type env_name: str
     """
-    engine_name = dlpx_obj.dlpx_engines.keys()[0]
-    env_obj = find_obj_by_name(dlpx_obj.server_session,
-                               environment, env_name)
-
+    env_obj = get_references.find_obj_by_name(
+        dlpx_obj.server_session, environment, env_name
+    )
     try:
-        environment.disable(dlpx_obj.server_session,env_obj.reference)
-        print('Attempting to disable {}'.format(env_name))
-    except (DlpxException, RequestError) as e:
-        print_exception('\nERROR: Disabling the host {} '
-                        'encountered an error:\n{}'.format(env_name, e))
-        sys.exit(1)
+        environment.disable(dlpx_obj.server_session, env_obj.reference)
+    except (dlpx_exceptions.DlpxException, 
exceptions.RequestError) as err: + dx_logging.print_exception( + f"ERROR: Disabling the host {env_name} " f"encountered an error:\n{err}" + ) def update_host_address(dlpx_obj, old_host_address, new_host_address): """ - Update the given host + Update the environment + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param old_host_address: Original IP address of environment + :type old_host_address: str + :param new_host_address: New IP address of the environment + :type new_host_address: str """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - old_host_obj = find_obj_by_name(dlpx_obj.server_session, - host, old_host_address) + old_host_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, host, old_host_address + ) if old_host_obj.type == "WindowsHost": - host_obj = WindowsHost() + host_obj = vo.WindowsHost() else: - host_obj = UnixHost() + host_obj = vo.UnixHost() host_obj.address = new_host_address try: - host.update(dlpx_obj.server_session, old_host_obj.reference, host_obj) + host.update(dlpx_obj.server_session, old_host_obj.reference, host_obj) - print('Attempting to update {} to {}'.format(old_host_address, new_host_address)) - - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Updating the host {} ' - 'encountered an error:\n{}'.format(env_name, e)) - sys.exit(1) + except (dlpx_exceptions.DlpxException, exceptions.RequestError) as err: + dx_logging.print_exception( + f"ERROR: Updating the host {host_obj.name} " f"encountered an error:\n{err}" + ) def list_env(dlpx_obj): """ - List all environments for a given engine + List all environments for the engine + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - all_envs = environment.get_all(dlpx_obj.server_session) + if not all_envs: + dx_logging.print_info( + f"There are no environments on engine:{dlpx_obj.server_session.address}" + ) + return + env_host = "" for env in all_envs: - env_user = find_obj_name(dlpx_obj.server_session, - environment.user, env.primary_user) + env_user = get_references.find_obj_name( + dlpx_obj.server_session, environment.user, env.primary_user + ) try: - env_host = find_obj_name(dlpx_obj.server_session, host, env.host) + env_host = get_references.find_obj_name( + dlpx_obj.server_session, host, env.host + ) except AttributeError: pass - - if env.type == 'WindowsHostEnvironment': - print('Environment Name: {}, Username: {}, Host: {},' - 'Enabled: {}, '.format(env.name, env_user, env_host, - env.enabled)) - elif env.type == 'WindowsCluster' or env.type == 'OracleCluster': - print('Environment Name: {}, Username: {}' \ - 'Enabled: {}, '.format(env.name, env_user, env.enabled)) + if env.type == "WindowsHostEnvironment": + print( + f"Environment Name: {env.name}, Username: {env_user}, " + f"Host: {env_host},Enabled: {env.enabled}" + ) + elif env.type == "WindowsCluster" or env.type == "OracleCluster": + print( + f"Environment Name: {env.name}, Username: {env_user}" + f"Enabled: {env.enabled}, " + ) else: - print 'Environment Name: {}, Username: {}, Host: {}, Enabled: {},'\ - ' ASE Environment Params: {}'.format( - env.name, env_user, env_host, env.enabled, - env.ase_host_environment_parameters if - isinstance(env.ase_host_environment_parameters, - ASEHostEnvironmentParameters) else 'Undefined') + print( + f"Environment Name: {env.name}, Username: {env_user}, " + f"Host: {env_host}, Enabled: {env.enabled}, " + f"ASE Environment Params: " + 
f'{env.ase_host_environment_parameters if isinstance(env.ase_host_environment_parameters,vo.ASEHostEnvironmentParameters) else "Undefined"}' + ) def delete_env(dlpx_obj, env_name): """ Deletes an environment - - engine: Dictionary of engines - env_name: Name of the environment to delete + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param env_name: Name of the environment to delete + :type env_name: str """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - - env_obj = find_obj_by_name(dlpx_obj.server_session, environment, - env_name) - + env_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, environment, env_name + ) if env_obj: environment.delete(dlpx_obj.server_session, env_obj.reference) - dlpx_obj.jobs[engine_name] = \ - dlpx_obj.server_session.last_job - + dlpx_obj.jobs[ + dlpx_obj.server_session.address + ] = dlpx_obj.server_session.last_job elif env_obj is None: - print('Environment was not found in the Engine: {}'.format(env_name)) - sys.exit(1) + dlpx_exceptions.DlpxObjectNotFound(f"Environment was not found: {env_name}") def refresh_env(dlpx_obj, env_name): """ Refresh the environment - - engine: Dictionary of engines - env_name: Name of the environment to refresh + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :parm env_name: Name of the environment to refresh + :type env_name: str """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - if env_name == "all": - env_list = find_all_objects(dlpx_obj.server_session, environment) - for env_obj in env_list: - try: - environment.refresh(dlpx_obj.server_session, env_obj.reference) - dlpx_obj.jobs[engine_name] = \ - dlpx_obj.server_session.last_job - - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Refreshing the environment {} ' - 'encountered an error:\n{}'.format(env_name, e)) - sys.exit(1) + env_list = get_references.find_all_objects(dlpx_obj.server_session, environment) + for env_obj in env_list: + try: + environment.refresh(dlpx_obj.server_session, env_obj.reference) + dlpx_obj.jobs[ + dlpx_obj.server_session.address + ] = dlpx_obj.server_session.last_job + except (dlpx_exceptions.DlpxException, exceptions.RequestError) as err: + dlpx_exceptions.DlpxException( + f"Encountered an error while refreshing {env_name}: {err}" + ) else: - - try: - env_obj = find_obj_by_name(dlpx_obj.server_session, environment, - env_name) - - environment.refresh(dlpx_obj.server_session, env_obj.reference) - dlpx_obj.jobs[engine_name] = \ - dlpx_obj.server_session.last_job - - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Refreshing the environment {} ' - 'encountered an error:\n{}'.format(env_name, e)) - sys.exit(1) - - -def update_ase_username(dlpx_obj): - """ - Update the ASE database user password - """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - - env_obj = UnixHostEnvironment() - env_obj.ase_host_environment_parameters = ASEHostEnvironmentParameters() - env_obj.ase_host_environment_parameters.db_user = \ - arguments['--update_ase_user'] - - try: - environment.update(dlpx_obj.server_session, find_obj_by_name( - dlpx_obj.server_session, environment, arguments['--env_name'], - env_obj).reference, env_obj) - - except (HttpError, RequestError) as e: - print_exception('\nERROR: Updating the ASE DB password ' - 'failed:\n{}\n'.format(e)) - - -def update_ase_pw(dlpx_obj): - """ - Update the ASE database user password - """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - env_obj = UnixHostEnvironment() 
- env_obj.ase_host_environment_parameters = ASEHostEnvironmentParameters() - env_obj.ase_host_environment_parameters.credentials = {'type': - 'PasswordCredential', - 'password': - arguments['--update_ase_pw']} - - try: - environment.update(dlpx_obj.server_session, find_obj_by_name( - dlpx_obj.server_session, environment, arguments['--env_name'], - env_obj).reference, env_obj) - - except (HttpError, RequestError) as e: - print_exception('\nERROR: Updating the ASE DB password ' - 'failed:\n{}\n'.format(e)) - - -def create_linux_env(dlpx_obj, env_name, host_user, ip_addr, toolkit_path, - pw=None): - + try: + env_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, environment, env_name + ) + environment.refresh(dlpx_obj.server_session, env_obj.reference) + dlpx_obj.jobs[ + dlpx_obj.server_session.address + ] = dlpx_obj.server_session.last_job + except (dlpx_exceptions.DlpxException, exceptions.RequestError) as err: + raise dlpx_exceptions.DlpxException( + f"Refreshing {env_name} encountered an error:\n{err}" + ) + + +def create_linux_env( + dlpx_obj, + env_name, + host_user, + ip_addr, + toolkit_path, + passwd=None, + ase_user=None, + ase_pw=None, +): """ Create a Linux environment. - - env_name: The name of the environment - host_user: The server account used to authenticate - ip_addr: DNS name or IP address of the environment - toolkit_path: Path to the toolkit. Note: This directory must be - writable by the host_user - pw: Password of the user. Default: None (use SSH keys instead) + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param env_name: The name of the environment + :type env_name: str + :param host_user: The server account used to authenticate + :type host_user: str + :param ip_addr: DNS name or IP address of the environment + :type ip_addr: str + :param toolkit_path: Path to the toolkit. Note: This directory must be + writable by the host_user + :type toolkit_path: str + :param passwd: Password of the user. 
Default: None (use SSH keys instead) + :type passwd: str or bool + :param ase_user: username for ASE DB + :type ase_user: str + :param ase_pw: password for the ASE DB user + :type ase_pw: str """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - env_params_obj = HostEnvironmentCreateParameters() - - if pw is None: - print_debug('Creating the environment with SSH Keys') - env_params_obj.primary_user = {'type': 'EnvironmentUser', - 'name': host_user, - 'credential': { - 'type': 'SystemKeyCredential'}} - else: - print_debug('Creating the environment with a password') - env_params_obj.primary_user = {'type': 'EnvironmentUser', - 'name': host_user, - 'credential': { - 'type': 'PasswordCredential', - 'password': pw }} - - env_params_obj.host_parameters = {'type': 'UnixHostCreateParameters', - 'host': { 'address': ip_addr, - 'type': 'UnixHost', - 'name': env_name, - 'toolkitPath': toolkit_path}} - - env_params_obj.host_environment = UnixHostEnvironment() + env_params_obj = vo.HostEnvironmentCreateParameters() + env_params_obj.host_environment = vo.UnixHostEnvironment() + env_params_obj.host_parameters = vo.UnixHostCreateParameters() + env_params_obj.host_parameters.host = vo.UnixHost() env_params_obj.host_environment.name = env_name - - if arguments['--ase']: - env_params_obj.host_environment.ase_host_environment_parameters = \ - ASEHostEnvironmentParameters() - - try: - env_params_obj.host_environment.ase_host_environment_parameters.db_user = \ - arguments['--ase_user'] - env_params_obj.host_environment.ase_host_environment_parameters.credentials = { - 'type': 'PasswordCredential', - 'password': arguments['--ase_pw']} - except KeyError: - print_exception('The --ase_user and --ase_pw arguments are' - ' required with the --ase flag.\n') - + env_params_obj.host_parameters.host.address = ip_addr + env_params_obj.host_parameters.name = env_name + env_params_obj.host_parameters.host.toolkit_path = toolkit_path + # setting user credentials + env_params_obj.primary_user = vo.EnvironmentUser() + env_params_obj.primary_user.name = host_user + if passwd is None: + env_params_obj.primary_user.credential = vo.SystemKeyCredential() + else: + env_params_obj.primary_user.credential = vo.PasswordCredential() + env_params_obj.primary_user.credential.password = passwd + if ase_user: + env_params_obj.host_environment.ase_host_environment_parameters = ( + vo.ASEHostEnvironmentParameters() + ) + env_params_obj.host_environment.ase_host_environment_parameters.db_user = ( + ase_user + ) + env_params_obj.host_environment.ase_host_environment_parameters.credentials = ( + vo.PasswordCredential() + ) + env_params_obj.host_environment.ase_host_environment_parameters.credentials.password = ( + ase_pw + ) try: - environment.create(dlpx_obj.server_session, - env_params_obj) - dlpx_obj.jobs[engine_name] = \ - dlpx_obj.server_session.last_job - - except (DlpxException, RequestError, HttpError) as e: - print('\nERROR: Encountered an exception while creating the ' - 'environment:\n{}'.format(e)) - except JobError as e: - print_exception('JobError while creating environment {}:\n{}'.format( - e, e.message)) - - -def create_windows_env(dlpx_obj, env_name, host_user, ip_addr, - pw=None, connector_name=None): - + environment.create(dlpx_obj.server_session, env_params_obj) + dlpx_obj.jobs[ + dlpx_obj.server_session.address + ] = dlpx_obj.server_session.last_job + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: Encountered an 
exception while creating the " + f"environment:\n{err}" + ) + except exceptions.JobError as err: + raise dlpx_exceptions.DlpxException( + f"JobError while creating environment:\n{err}" + ) from err + + +def create_windows_env( + dlpx_obj, env_name, host_user, ip_addr, passwd=None, connector_host_name=None +): """ Create a Windows environment. - - env_name: The name of the environment - host_user: The server account used to authenticate - ip_addr: DNS name or IP address of the environment - toolkit_path: Path to the toolkit. Note: This directory must be - writable by the host_user - pw: Password of the user. Default: None (use SSH keys instead) + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param env_name: The name of the environment + :type env_name: str + :param host_user: The server account used to authenticate + :type host_user: str + :param ip_addr: DNS name or IP address of the environment + :type ip_addr: str + :param passwd: Password of the user. Default: None (use SSH keys instead) + :type passwd: str + :param connector_name: Name of the Delphix connector + :type connector_name: str """ - engine_name = dlpx_obj.dlpx_engines.keys()[0] - - env_params_obj = HostEnvironmentCreateParameters() - - print_debug('Creating the environment with a password') - - env_params_obj.primary_user = {'type': 'EnvironmentUser', - 'name': host_user, - 'credential': { - 'type': 'PasswordCredential', - 'password': pw }} - - env_params_obj.host_parameters = {'type': 'WindowsHostCreateParameters', - 'host': { 'address': ip_addr, - 'type': 'WindowsHost', - 'name': env_name, - 'connectorPort': 9100}} - - env_params_obj.host_environment = WindowsHostEnvironment() + env_params_obj = vo.HostEnvironmentCreateParameters() + env_params_obj.primary_user = vo.EnvironmentUser() + env_params_obj.primary_user.name = host_user + env_params_obj.primary_user.credential = vo.PasswordCredential() + env_params_obj.primary_user.credential.password = passwd + env_params_obj.host_parameters = vo.WindowsHostCreateParameters() + env_params_obj.host_parameters.host = vo.WindowsHost() + env_params_obj.host_parameters.host.connector_port = 9100 + env_params_obj.host_parameters.host.address = ip_addr + env_params_obj.host_environment = vo.WindowsHostEnvironment() env_params_obj.host_environment.name = env_name - - if connector_name: - env_obj = find_obj_by_name(dlpx_obj.server_session, environment, - connector_name) - - if env_obj: + env_obj = None + if connector_host_name: + env_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, environment, connector_host_name + ) + if env_obj: env_params_obj.host_environment.proxy = env_obj.host - elif env_obj is None: - print('Host was not found in the Engine: {}'.format(arguments[--connector_name])) - sys.exit(1) - + elif connector_host_name is not None and env_obj is None: + raise dlpx_exceptions.DlpxObjectNotFound( + f"Host was not found in the Engine: {connector_host_name}" + ) try: - environment.create(dlpx_obj.server_session, - env_params_obj) - dlpx_obj.jobs[engine_name] = \ - dlpx_obj.server_session.last_job - - except (DlpxException, RequestError, HttpError) as e: - print('\nERROR: Encountered an exception while creating the ' - 'environment:\n{}'.format(e)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). 
- Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func + environment.create(dlpx_obj.server_session, env_params_obj) + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: Encountered an exception while creating the " + f"environment:\n{err}" + ) @run_async -def main_workflow(engine, dlpx_obj): +def main_workflow(engine, dlpx_obj, single_thread): """ This function is where we create our main workflow. Use the @run_async decorator to run this function asynchronously. The @run_async decorator allows us to run against multiple Delphix Engine simultaneously - :param engine: Dictionary of engines :type engine: dictionary - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool """ - try: - # Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('ERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], engine["username"], engine["password"] + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: dx_environment encountered an error authenticating to " + f' {engine["ip_address"]} :\n{err}' + ) try: with dlpx_obj.job_mode(single_thread): - while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: - if len(thingstodo)> 0: - if arguments['--type'] == 'linux' or arguments['--type'] == 'windows': - env_name = arguments['--env_name'] - host_user = arguments['--host_user'] - pw = arguments['--pw'] - ip_addr = arguments['--ip'] - host_name = arguments['--connector_name'] - if arguments['--type'] == 'linux': - toolkit_path = arguments['--toolkit'] - create_linux_env(dlpx_obj, env_name, host_user, - ip_addr, toolkit_path, pw) - else: - create_windows_env(dlpx_obj, env_name, host_user, - ip_addr, pw, host_name,) - - elif arguments['--delete']: - delete_env(dlpx_obj, arguments['--delete']) - - elif arguments['--refresh']: - refresh_env(dlpx_obj, arguments['--refresh']) - - elif arguments['--update_ase_pw']: - update_ase_pw(dlpx_obj) - - elif arguments['--update_ase_user']: - update_ase_username(dlpx_obj) - elif arguments['--list']: - list_env(dlpx_obj) - elif arguments['--update_host']: - update_host_address(dlpx_obj, arguments['--old_host_address'], arguments['--new_host_address']) - elif arguments['--enable']: - enable_environment(dlpx_obj, arguments['--env_name']) - elif arguments['--disable']: - disable_environment(dlpx_obj, arguments['--env_name']) - - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - 
job_obj = job.get(dlpx_obj.server_session, dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{} Environment: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - except (DlpxException, RequestError, JobError, HttpError) as e: - print_exception('Error while creating the environment {}\n{}'.format( - arguments['--env_name'], e)) - sys.exit(1) - -def run_job(dlpx_obj, config_file_path): - """ - This function runs the main_workflow aynchronously against all the - servers specified + if ARGUMENTS["--list"]: + list_env(dlpx_obj) + elif ARGUMENTS["--create"]: + env_name = ARGUMENTS["--env_name"] + host_user = ARGUMENTS["--host_user"] + passwd = ARGUMENTS["--passwd"] + ip_addr = ARGUMENTS["--ip"] + type = ARGUMENTS["--os_type"] + toolkit_path = ARGUMENTS["--toolkit"] + if type is None: + raise dlpx_exceptions.DlpxException( + "--os_type parameter is required for environment creation" + ) + + type = type.lower() + if type == "windows": + connector_host_name = ARGUMENTS["--connector_host_name"] + create_windows_env( + dlpx_obj, + env_name, + host_user, + ip_addr, + passwd, + connector_host_name, + ) + elif type == "linux": + if toolkit_path is None: + raise dlpx_exceptions.DlpxException( + "--toolkit parameter is required for environment " + "creation" + ) + create_linux_env( + dlpx_obj, env_name, host_user, ip_addr, toolkit_path, passwd + ) + elif ARGUMENTS["--enable"]: + enable_environment(dlpx_obj, ARGUMENTS["--env_name"]) + elif ARGUMENTS["--disable"]: + disable_environment(dlpx_obj, ARGUMENTS["--env_name"]) + elif ARGUMENTS["--delete"]: + delete_env(dlpx_obj, ARGUMENTS["--env_name"]) + elif ARGUMENTS["--refresh"]: + refresh_env(dlpx_obj, ARGUMENTS["--env_name"]) + elif ARGUMENTS["--update_host"]: + update_host_address( + dlpx_obj, + ARGUMENTS["--old_host_address"], + ARGUMENTS["--new_host_address"], + ) + run_job.track_running_jobs(engine, dlpx_obj, 5) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Error in dx_environment for engine:" + f'{engine["ip_address"]}: Error Message: {err}' + ) - dlpx_obj: Virtualization Engine session object - config_file_path: filename of the configuration file for virtualization - engines - """ - - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - try: - #For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - #Create a new thread and add it to the list. 
-                threads.append(main_workflow(engine, dlpx_obj))
-
-        except DlpxException as e:
-            print 'Error encountered in run_job():\n{}'.format(e)
-            sys.exit(1)
-
-    elif arguments['--all'] is False:
-        #Else if the --engine argument was given, test to see if the engine
-        # exists in dxtools.conf
-        if arguments['--engine']:
-            try:
-                engine = dlpx_obj.dlpx_engines[arguments['--engine']]
-                print_info('Executing against Delphix Engine: {}\n'.format(
-                    arguments['--engine']))
-            except (DlpxException, RequestError, KeyError) as e:
-                print_exception('\nERROR: Delphix Engine {} cannot be '
-                                'found in {}. Please check your value '
-                                'and try again. Exiting.\n'.format(
-                                arguments['--engine'], config_file_path))
-        else:
-            #Else search for a default engine in the dxtools.conf
-            for delphix_engine in dlpx_obj.dlpx_engines:
-                if dlpx_obj.dlpx_engines[delphix_engine]['default'] == \
-                    'true':
-                    engine = dlpx_obj.dlpx_engines[delphix_engine]
-                    print_info('Executing against the default Delphix Engine '
-                               'in the dxtools.conf: {}'.format(
-                               dlpx_obj.dlpx_engines[delphix_engine]['hostname']))
-                    break
-            if engine is None:
-                raise DlpxException("\nERROR: No default engine found. Exiting")
-
-    #run the job against the engine
-    threads.append(main_workflow(engine, dlpx_obj))
-    #For each thread in the list...
-    for each in threads:
-        #join them back together so that we wait for all threads to complete
-        # before moving on
-        each.join()
-
-
-def time_elapsed(time_start):
+def main():
     """
-    This function calculates the time elapsed since the beginning of the script.
-    Call this anywhere you want to note the progress in terms of time
-
-    :param time_start: start time of the script.
-    :type time_start: float
+    main function - creates session and runs jobs
     """
-    return round((time() - time_start)/60, +1)
-
-
-def main():
-    # We want to be able to call on these variables anywhere in the script.
-    global single_thread
-    global debug
-
-    time_start = time()
-    single_thread = False
-
+    time_start = time.time()
     try:
-        dx_session_obj = GetSession()
-        logging_est(arguments['--logdir'])
-        print_debug(arguments)
-        config_file_path = arguments['--config']
-        # Parse the dxtools.conf and put it into a dictionary
+        dx_session_obj = get_session.GetSession()
+        dx_logging.logging_est(ARGUMENTS["--logdir"])
+        config_file_path = ARGUMENTS["--config"]
+        single_thread = ARGUMENTS["--single_thread"]
+        engine = ARGUMENTS["--engine"]
         dx_session_obj.get_config(config_file_path)
-
-        # This is the function that will handle processing main_workflow for
-        # all the servers.
-        run_job(dx_session_obj, config_file_path)
-
-        elapsed_minutes = time_elapsed(time_start)
-        print_info('script took {:.2f} minutes to get this far.'.format(
-            elapsed_minutes))
-
+        for each in run_job.run_job_mt(
+            main_workflow, dx_session_obj, engine, single_thread
+        ):
+            each.join()
+        elapsed_minutes = run_job.time_elapsed(time_start)
+        dx_logging.print_info(
+            f"dx_environment took {elapsed_minutes} minutes to " f"complete."
+        )
     # Here we handle what we do when the unexpected happens
-    except SystemExit as e:
+    except SystemExit as err:
         # This is what we use to handle our sys.exit(#)
-        sys.exit(e)
-
-    except DlpxException as e:
-        # We use this exception handler when an error occurs in a function call.
-        print_exception('ERROR: Please check the ERROR message below:\n'
-                        '{}'.format(e.message))
+        sys.exit(err)
+
+    except dlpx_exceptions.DlpxException as err:
+        # We use this exception handler when an error occurs in a function
+        # call.
+ dx_logging.print_exception( + f"ERROR: Please check the ERROR message " f"below:\n {err.error}" + ) sys.exit(2) - except HttpError as e: + except exceptions.HttpError as err: # We use this exception handler when our connection to Delphix fails - print_exception('ERROR: Connection failed to the Delphix Engine. Please' - 'check the ERROR message below:\n{}'.format(e.message)) + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) sys.exit(2) - except JobError as e: + except exceptions.JobError as err: # We use this exception handler when a job fails in Delphix so that we # have actionable data - print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) - elapsed_minutes = time_elapsed(time_start) - print_exception('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." + f"{basename(__file__)} took {elapsed_minutes} minutes to " + f"complete" + ) sys.exit(3) except KeyboardInterrupt: # We use this exception handler to gracefully handle ctrl+c exits - print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed(time_start) - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - except: - # Everything else gets caught here - print_exception('{}\n{}'.format(sys.exc_info()[0], - traceback.format_exc())) - elapsed_minutes = time_elapsed(time_start) - print_info("{} took {:.2f} minutes to get this far".format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to complete." + ) if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! main() diff --git a/dx_groups.py b/dx_groups.py deleted file mode 100755 index 7bb79c6..0000000 --- a/dx_groups.py +++ /dev/null @@ -1,363 +0,0 @@ -#!/usr/bin/env python -# Adam Bowen - Aug 2017 -#Description: -# This script will allow you to easily manage groups in Delphix -# -#Requirements -#pip install docopt delphixpy - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. -"""Description -Usage: - dx_groups.py (--group_name [--add | --delete]) - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_groups.py (--list) - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_groups.py -h | --help | -v | --version -Description - -Examples: - dx_groups.py --debug --config delphixpy-examples/dxtools_1.conf --group_name Test --add - dx_groups.py --config delphixpy-examples/dxtools_1.conf --group_name Test --delete - dx_groups.py --list - -Options: - --group_name The name of the group - --add Add the identified group - --delete Delete the identified group - --engine Alt Identifier of Delphix engine in dxtools.conf. 
- --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_skel.log] - -h --help Show this screen. - -v --version Show version. -""" - -VERSION = 'v.0.0.002' - -import sys -from os.path import basename -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import group -from delphixpy.v1_8_0.web.vo import Group - -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_all_objects -from lib.GetSession import GetSession - -def add_group(group_name): - """ - This function adds the group - """ - group_obj = Group() - group_obj.name = group_name - - - try: - group.create(dx_session_obj.server_session,group_obj) - print('Attempting to create {}'.format(group_name)) - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Creating the group {} ' - 'encountered an error:\n{}'.format(group_name, e)) - sys.exit(1) - -def delete_group(group_name): - """ - This function adds the group - """ - group_obj = find_obj_by_name(dx_session_obj.server_session, - group, group_name) - - - try: - group.delete(dx_session_obj.server_session,group_obj.reference) - print('Attempting to delete {}'.format(group_name)) - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Deleting the group {} ' - 'encountered an error:\n{}'.format(group_name, e)) - sys.exit(1) - -def list_groups(): - """ - This function lists all groups - """ - group_list = find_all_objects(dx_session_obj.server_session, group) - - for group_obj in group_list: - print('Group: {}'.format(group_obj.name)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - """ - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dx_session_obj.job_mode(single_thread): - while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): - if len(thingstodo) > 0: - if arguments['--add'] : - add_group(arguments['--group_name']) - elif arguments['--delete']: - delete_group(arguments['--group_name']) - elif arguments['--list']: - list_groups() - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dx_session_obj.jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, - dx_session_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Group: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the - # running jobs list. - del dx_session_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dx_session_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - except (HttpError, RequestError, JobError, DlpxException) as e: - print_exception('ERROR: Could not complete group ' - 'operation: {}'.format(e)) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - (arguments['--engine']))) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. 
Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - #elapsed_minutes = round((time() - time_start)/60, +1) - #return elapsed_minutes - return round((time() - time_start)/60, +1) - - -def main(arguments): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. - run_job() - - elapsed_minutes = time_elapsed() - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - #Here we handle what we do when the unexpected happens - except DlpxException as e: - print_exception('script encountered an error while processing the' - 'config file:\n{}'.format(e)) - - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message:\n{}'.format(e)) - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('{} took {:.2f} minutes to get this far\n{}'.format( - basename(__file__), elapsed_minutes, e)) - sys.exit(3) - - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) diff --git a/dx_jetstream_container.py b/dx_jetstream_container.py deleted file mode 100755 index 0ba25db..0000000 --- a/dx_jetstream_container.py +++ /dev/null @@ -1,589 +0,0 @@ -#!/usr/bin/env python -#Adam Bowen - Jun 2016 -#dx_jetstream_container.py -#Use this file as a starter for your python scripts, if you like -#requirements -#pip install docopt delphixpy - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. This thing is brilliant. 
-"""Perform routine operations on Jetstream containers - -Usage: - dx_jetstream_container.py --template (--container | --all_containers ) - --operation [-d | --engine | --all] - [--bookmark_name ] [--bookmark_tags ] [--bookmark_shared ] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_jetstream_container.py -h | --help | -v | --version - -Perform routine operations on a Jetstream Container - -Examples: - dx_jetstream_container.py --operation refresh --template "Masked SugarCRM Application" --container "Sugar Automated Testing Container" - dx_jetstream_container.py --operation reset --template "Masked SugarCRM Application" --all_containers - dx_jetstream_container.py --template "Masked SugarCRM Application" --container "Sugar Automated Testing Container" --operation bookmark --bookmark_name "Testing" --bookmark_tags "one,two,three" --bookmark_shared true - -Options: - -d Identifier of Delphix engine in dxtools.conf. - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --all_containers Run against all jetstream containers - --template Name of Jetstream template to execute against. - --container Name of Jetstream container to execute against. - --operation Name of the operation to execute - Can be one of: - start, stop, recover, refresh, reset, bookmark - --bookmark_name Name of the bookmark to create - (only valid with "--operation bookmark") - --bookmark_tags Comma-delimited list to tag the bookmark - (only valid with "--operation bookmark") - --bookmark_shared Share bookmark: true/false - [default: false] - --host Name of environment in Delphix to execute against. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_jetstream_container_refresh.log] - -h --help Show this screen. - -v --version Show version. - -""" - -VERSION="v.0.0.005" - - -from docopt import docopt -import logging -from os.path import basename -import signal -import sys -import time -import traceback -import json -import threading - -from multiprocessing import Process -from time import sleep, time - -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0 import job_context -from delphixpy.v1_6_0.web import jetstream, job -from delphixpy.v1_6_0.web.vo import JSBookmark, JSBookmarkCreateParameters, JSTimelinePointLatestTimeInput -#from delphixpy.v1_6_0.web.vo import - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... 
- t1.join() - t2.join() - """ - #from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = threading.Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - -@run_async -def container_bookmark(engine, server, container_obj, bookmark_name, bookmark_shared, tags): - '''This function bookmarks the current branch on the container''' - #But first, let's make sure it is in a CONSISTENT state - container_recover(engine, server, container_obj) - #Next let's make sure it is started - container_start(engine, server, container_obj) - #Prepare the bookmark creation parameters - bookmark_create_params = JSBookmarkCreateParameters() - bookmark_create_params.bookmark = JSBookmark() - bookmark_create_params.bookmark.name = bookmark_name - bookmark_create_params.bookmark.branch = container_obj.active_branch - bookmark_create_params.bookmark.shared = bookmark_shared - bookmark_create_params.bookmark.tags = tags - bookmark_create_params.timeline_point_parameters = JSTimelinePointLatestTimeInput() - bookmark_create_params.timeline_point_parameters.source_data_layout = container_obj.reference - - jetstream.bookmark.create(server, bookmark_create_params) - -def container_recover(engine, server, container_obj): - '''This function recovers a container that is in an "INCONSISTENT" state''' - if container_obj.state == "INCONSISTENT": - #if not recover it - job_obj = jetstream.container.recover(server, container_obj.reference) - #wait for the recovery action to finish - job_context.wait(server,job_obj.reference) - #get the updated object with the new state - container_obj = jetstream.container.get(server, container_obj.reference) - return container_obj - -@run_async -def container_recover_async(engine, server, container_obj): - '''This function recovers all specified containers asynchronously''' - container_recover(engine, server, container_obj) - -@run_async -def container_refresh(engine, server, container_obj): - '''This function refreshes a container''' - #But first, let's make sure it is in a CONSISTENT state - container_recover(engine, server, container_obj) - #Next let's make sure it is started - container_start(engine, server, container_obj) - #Now let's refresh it. - refresh_job = jetstream.container.refresh(server, container_obj.reference) - -@run_async -def container_reset(engine, server, container_obj): - '''This function resets a container''' - #But first, let's make sure it is in a CONSISTENT state - container_recover(engine, server, container_obj) - #Next let's make sure it is started - container_start(engine, server, container_obj) - #Now let's refresh it. 
- reset_job = jetstream.container.reset(server, container_obj.reference) - -def container_start(engine, server, container_obj): - '''This function starts/enables a container that is in an "OFFLINE" state''' - if container_obj.state == "OFFLINE": - #if not, enable it - jetstream.container.enable(server, container_obj.reference) - -@run_async -def container_start_async(engine, server, container_obj): - '''This function starts all specified containers asynchronously''' - container_start(engine, server, container_obj) - -def container_stop(engine, server, container_obj): - '''This function starts/enables a container that is in an "OFFLINE" state''' - if container_obj.state == "ONLINE": - #if not, enable it - jetstream.container.disable(server, container_obj.reference) - -@run_async -def container_stop_async(engine, server, container_obj): - '''This function starts all specified containers asynchronously''' - container_stop(engine, server, container_obj) - -def find_container_by_name_and_template_name(engine, server, container_name, template_name): - template_obj = find_obj_by_name(engine, server, jetstream.template, template_name) - - containers = jetstream.container.get_all(server, template=template_obj.reference) - - for each in containers: - if each.name == container_name: - print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) - return each - print_info("Unable to find \"" + container_name + "\" in " + template_name) - -def find_all_containers_by_template_name(engine, server, template_name): - template_obj = find_obj_by_name(engine, server, jetstream.template, template_name) - - containers = jetstream.container.get_all(server, template=template_obj.reference) - if containers: - for each in containers: - print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) - return containers - print_info("Unable to find \"" + container_name + "\" in " + template_name) - -def find_obj_by_name(engine, server, f_class, obj_name): - """ - Function to find objects by name and object class, and return object's reference as a string - You might use this function to find objects like groups. - """ - print_debug(engine["hostname"] + ": Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"") - obj_ref = '' - - all_objs = f_class.get_all(server) - for obj in all_objs: - if obj.name == obj_name: - print_debug(engine["hostname"] + ": Found a match " + str(obj.reference)) - return obj - -def get_config(config_file_path): - """ - This function reads in the dxtools.conf file - """ - #First test to see that the file is there and we can open it - try: - config_file = open(config_file_path).read() - except: - print_error("Was unable to open " + config_file_path + ". Please check the path and permissions, then try again.") - sys.exit(1) - #Now parse the file contents as json and turn them into a python dictionary, throw an error if it isn't proper json - try: - config = json.loads(config_file) - except: - print_error("Was unable to read " + config_file_path + " as json. 
Please check file in a json formatter and try again.") - sys.exit(1) - #Create a dictionary of engines (removing the data node from the dxtools.json, for easier parsing) - delphix_engines = {} - for each in config['data']: - delphix_engines[each['hostname']] = each - print_debug(delphix_engines) - return delphix_engines - -def logging_est(logfile_path): - """ - Establish Logging - """ - global debug - logging.basicConfig(filename=logfile_path,format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - print_info("Welcome to " + basename(__file__) + ", version " + VERSION) - global logger - debug = arguments['--debug'] - logger = logging.getLogger() - if debug == True: - logger.setLevel(10) - print_info("Debug Logging is enabled.") - -def job_mode(server): - """ - This function tells Delphix how to execute jobs, based on the single_thread variable at the beginning of the file - """ - #Synchronously (one at a time) - if single_thread == True: - job_m = job_context.sync(server) - print_debug("These jobs will be executed synchronously") - #Or asynchronously - else: - job_m = job_context.async(server) - print_debug("These jobs will be executed asynchronously") - return job_m - -def job_wait(server): - """ - This job stops all work in the thread/process until jobs are completed. - """ - #Grab all the jos on the server (the last 25, be default) - all_jobs = job.get_all(server) - #For each job in the list, check to see if it is running (not ended) - for jobobj in all_jobs: - if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): - print_debug("Waiting for " + jobobj.reference + " (currently: " + jobobj.job_state+ ") to finish running against the container") - #If so, wait - job_context.wait(server,jobobj.reference) - -def on_exit(sig, func=None): - """ - This function helps us end cleanly and with exit codes - """ - print_info("Shutdown Command Received") - print_info("Shutting down " + basename(__file__)) - sys.exit(0) - -def print_debug(print_obj): - """ - Call this function with a log message to prefix the message with DEBUG - """ - try: - if debug == True: - print "DEBUG: " + str(print_obj) - logging.debug(str(print_obj)) - except: - pass - -def print_error(print_obj): - """ - Call this function with a log message to prefix the message with ERROR - """ - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - -def print_info(print_obj): - """ - Call this function with a log message to prefix the message with INFO - """ - print "INFO: " + str(print_obj) - logging.info(str(print_obj)) - -def print_warning(print_obj): - """ - Call this function with a log message to prefix the message with WARNING - """ - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - -def serversess(f_engine_address, f_engine_username, f_engine_password): - """ - Function to setup the session with the Delphix Engine - """ - server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") - return server_session - -def set_exit_handler(func): - """ - This function helps us set the correct exit code - """ - signal.signal(signal.SIGTERM, func) - -@run_async -def main_workflow(engine): - """ - This function is where the main workflow resides. - Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously - """ - - #Pull out the values from the dictionary for this engine - engine_address = engine["ip_address"] - engine_username = engine["username"] - engine_password = engine["password"] - #Establish these variables as empty for use later - containers = [] - jobs = {} - - - #Setup the connection to the Delphix Engine - server = serversess(engine_address, engine_username, engine_password) - - #If we specified a specific database by name.... - if arguments['--container']: - #Get the container object from the name - container_obj = find_container_by_name_and_template_name(engine, server, arguments['--container'], arguments['--template']) - if container_obj: - containers.append(container_obj) - #Else, if we said all containers ... - elif arguments['--all_containers']: - #Grab all containers in the template - containers = find_all_containers_by_template_name(engine, server, arguments['--template']) - if not containers or len(containers) == 0: - print_error("No containers found with the criterion specified") - return - #reset the running job count before we begin - i = 0 - container_threads = [] - #While there are still running jobs or containers still to process.... - while (i > 0 or len(containers) > 0): - #While there are containers still to process and we are still under - #the max simultaneous jobs threshold (if specified) - while len(containers) > 0 and (arguments['--parallel'] == None or i < int(arguments['--parallel'])): - #Give us the next database in the list, and remove it from the list - container_obj = containers.pop() - #what do we want to do? - if arguments['--operation'] == "refresh": - #refresh the container - container_threads.append(container_refresh(engine, server, container_obj)) - elif arguments['--operation'] == "reset": - container_threads.append(container_reset(engine, server, container_obj)) - elif arguments['--operation'] == "start": - container_threads.append(container_start_async(engine, server, container_obj)) - elif arguments['--operation'] == "stop": - container_threads.append(container_stop_async(engine, server, container_obj)) - elif arguments['--operation'] == "recover": - container_threads.append(container_recover_async(engine, server, container_obj)) - elif arguments['--operation'] == "bookmark": - if arguments['--bookmark_tags']: - tags = arguments['--bookmark_tags'].split(',') - else: - tags = [] - if arguments['--bookmark_shared']: - if str(arguments['--bookmark_shared']).lower() == "true": - bookmark_shared = True - elif str(arguments['--bookmark_shared']).lower() == "false": - bookmark_shared = False - else: - print_error("Invalid argument \"" + str(arguments['--bookmark_shared']).lower() + "\" for --bookmark_shared") - print_error("--bookmark_shared only takes a value of true/false.") - print_error("Exiting") - sys.exit(1) - else: - bookmark_shared=False - container_threads.append(container_bookmark(engine, server, container_obj, arguments['--bookmark_name'], bookmark_shared, tags)) - #For each thread in the list... - i = len(container_threads) - #Check to see if we are running at max parallel processes, and report if so. - if ( arguments['--parallel'] != None and i >= int(arguments['--parallel'])): - print_info(engine["hostname"] + ": Max jobs reached (" + str(i) + ")") - #reset the running jobs counter, as we are about to update the count from the jobs report. 
- i=0 - for t in container_threads: - if t.isAlive(): - i+=1 - print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + str(len(containers)) + " jobs waiting to run") - #If we have running jobs, pause before repeating the checks. - if i > 0: - sleep(float(arguments['--poll'])) - print "made it out" - #For each thread in the list... - for each in container_threads: - #join them back together so that we wait for all threads to complete before moving on - each.join() - -def run_job(engine): - """ - This function runs the main_workflow aynchronously against all the servers specified - """ - #Create an empty list to store threads we create. - threads = [] - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - #For each server in the dxtools.conf... - for delphix_engine in dxtools_objects: - engine = dxtools_objects[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - else: - #Else if the --engine argument was given, test to see if the engine exists in dxtools.conf - if arguments['--engine']: - try: - engine = dxtools_objects[arguments['--engine']] - print_info("Executing against Delphix Engine: " + arguments['--engine']) - except: - print_error("Delphix Engine \"" + arguments['--engine'] + "\" cannot be found in " + config_file_path) - print_error("Please check your value and try again. Exiting") - sys.exit(1) - #Else if the -d argument was given, test to see if the engine exists in dxtools.conf - elif arguments['-d']: - try: - engine = dxtools_objects[arguments['-d']] - print_info("Executing against Delphix Engine: " + arguments['-d']) - except: - print_error("Delphix Engine \"" + arguments['-d'] + "\" cannot be found in " + config_file_path) - print_error("Please check your value and try again. Exiting") - sys.exit(1) - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dxtools_objects: - if dxtools_objects[delphix_engine]['default'] == 'true': - engine = dxtools_objects[delphix_engine] - print_info("Executing against the default Delphix Engine in the dxtools.conf: " + dxtools_objects[delphix_engine]['hostname']) - break - if engine == None: - print_error("No default engine found. Exiting") - sys.exit(1) - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete before moving on - each.join() - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - -def update_jobs_dictionary(engine, server, jobs): - """ - This function checks each job in the dictionary and updates its status or removes it if the job is complete. - Return the number of jobs still running. - """ - #Establish the running jobs counter, as we are about to update the count from the jobs report. 
- i = 0 - #get all the jobs, then inspect them - for j in jobs.keys(): - job_obj = job.get(server, jobs[j]) - print_debug(engine["hostname"] + ": " + str(job_obj)) - print_info(engine["hostname"] + ": " + j.name + ": " + job_obj.job_state) - - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from the running jobs list. - del jobs[j] - else: - #If the job is in a running state, increment the running job count. - i += 1 - return i - -def main(argv): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global dxtools_objects - - try: - #Declare globals that will be used throughout the script. - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - engine = None - single_thread = False - - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dxtools_objects = get_config(config_file_path) - - #This is the function that will handle processing main_workflow for all the servers. - run_job(engine) - - elapsed_minutes = time_elapsed() - print_info("script took " + str(elapsed_minutes) + " minutes to get this far.") - - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) - sys.exit(2) - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that we have actionable data - """ - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(3) - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") - except: - """ - Everything else gets caught here - """ - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - #Feed our arguments to the main function, and off we go! - print arguments - main(arguments) \ No newline at end of file diff --git a/dx_jobs.py b/dx_jobs.py deleted file mode 100755 index 069853b..0000000 --- a/dx_jobs.py +++ /dev/null @@ -1,367 +0,0 @@ -#!/usr/bin/env python -# Corey Brune - Oct 2016 -#Description: -# List jobs on a given engine -# -#Requirements -#pip install docopt delphixpy - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. 
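The configuration handling shown above (get_config() in the deleted script, and the GetSession.get_config() call used by the newer scripts) loads dxtools.conf as JSON, expects a top-level "data" list of engine entries, re-keys those entries by "hostname", and later looks for the entry whose "default" is "true". A minimal sketch of a file that would satisfy that code; the hostname, address and credentials are invented, and "use_https" is included only on the assumption that it sits alongside the other per-engine fields read by the Python 3 scripts:

import json

# Invented engine entry; field names follow how the scripts above read them:
# get_config() re-keys config["data"] by "hostname", run_job() looks for
# "default" == "true", and session setup reads "ip_address", "username",
# "password" (plus "use_https" in the Python 3 scripts).
sample_conf = {
    "data": [
        {
            "hostname": "myengine1",
            "ip_address": "10.0.1.10",
            "username": "delphix_admin",
            "password": "changeme",
            "default": "true",
            "use_https": "true",
        }
    ]
}

with open("dxtools.conf", "w") as conf_file:
    json.dump(sample_conf, conf_file, indent=4)

# Re-keyed by hostname, exactly as get_config() does.
engines = {entry["hostname"]: entry for entry in sample_conf["data"]}
print(engines["myengine1"]["ip_address"])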
-"""List jobs on an engine -Usage: - dx_jobs.py (--list [--state ][--title ]) - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_jobs.py -h | --help | -v | --version - -List jobs on an engine - -Examples: - dx_jobs.py --list --state failed - dx_jobs.py --list --title snapsync - dx_jobs.py --list --state failed --title snapsync - - -Options: - --list List all jobs on an engine. - --title Filter job by title name. Note: The search is case insensitive. - --state Filter jobs by state: RUNNING, SUSPENDED, CANCELED, COMPLETED, FAILED - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_operations_vdb.log] - -h --help Show this screen. - -v --version Show version. -""" - -VERSION = 'v.0.0.002' - -import sys -import re -from os.path import basename -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import job - -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetSession import GetSession - - -def list_jobs(): - - if arguments['--state']: - if re.match('RUNNING|SUSPENDED|CANCELED|COMPLETED|FAILED', - arguments['--state'].upper()): - pass - else: - print_info('The state should be one of these options:\n' - 'RUNNING, SUSPENDED, CANCELED, COMPLETED, FAILED') - sys.exit(1) - - for job_info in job.get_all(dx_session_obj.server_session, - job_state=arguments['--state'].upper()): - - if arguments['--title']: - if re.search(arguments['--title'], job_info.title, - re.IGNORECASE): - print('Action={}, Job State={}, Parent Action State={},' - 'Percent Complete={}, Reference={}, Target={},' - 'Target Name={}, Title={}, User={}\n'.format( - job_info.action_type, job_info.job_state, - job_info.parent_action_state, - job_info.percent_complete, job_info.reference, - job_info.target, job_info.target_name, - job_info.title, job_info.user)) - else: - print('Action=%s, Job State=%s, Parent Action State=%s,' - 'Percent Complete=%s, Reference=%s, Target=%s,' - 'Target Name=%s, Title=%s, User=%s\n' % - (job_info.action_type, job_info.job_state, - job_info.parent_action_state, - job_info.percent_complete, job_info.reference, - job_info.target, job_info.target_name, - job_info.title, job_info.user)) - else: - for job_info in job.get_all(dx_session_obj.server_session): - - if arguments['--title']: - if re.search(arguments['--title'], job_info.title, - re.IGNORECASE): - print('Action=%s, Job State=%s, Parent Action State=%s,' - 'Percent Complete=%s, Reference=%s, Target=%s,' - 'Target Name=%s, Title=%s, User=%s\n' % - (job_info.action_type, job_info.job_state, - job_info.parent_action_state, job_info.percent_complete, - job_info.reference, job_info.target, job_info.target_name, - job_info.title, job_info.user)) - else: - print('Action=%s, Job State=%s, Parent Action State=%s,' - 'Percent Complete=%s, Reference=%s, Target=%s,' - 'Target Name=%s, Title=%s, User=%s\n' % - (job_info.action_type, job_info.job_state, - 
job_info.parent_action_state, job_info.percent_complete, - job_info.reference, job_info.target, - job_info.target_name, job_info.title, job_info.user)) - - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. - This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - """ - jobs = {} - - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - with dx_session_obj.job_mode(single_thread): - while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0: - if len(thingstodo) > 0: - - if arguments['--list']: - list_jobs() - thingstodo.pop() - - # get all the jobs, then inspect them - i = 0 - for j in dx_session_obj.jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, - dx_session_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Operations: {}'.format(engine['hostname'], - job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it from the - # running jobs list. - del dx_session_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the running - # job count. - i += 1 - - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - - # If we have running jobs, pause before repeating the checks. - if len(dx_session_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. - threads = [] - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. 
- threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: %s\n' % - (arguments['--engine'])) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine %s cannot be ' 'found in %s. Please check your value ' - 'and try again. Exiting.\n' % ( - arguments['--engine'], config_file_path)) - - - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: %s' % ( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - - -def main(argv): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global database_name - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - engine = None - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. 
- run_job() - - elapsed_minutes = time_elapsed() - print_info("script took " + str(elapsed_minutes) + - " minutes to get this far.") - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message below') - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('%s took %s minutes to get this far\n' % - (basename(__file__), str(elapsed_minutes))) - sys.exit(3) - - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far\n' % - (basename(__file__), str(elapsed_minutes))) - - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far\n' % - (basename(__file__), str(elapsed_minutes))) - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) diff --git a/dx_operations.py b/dx_operations.py index 9827d78..0d0f81b 100755 --- a/dx_operations.py +++ b/dx_operations.py @@ -1,17 +1,18 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Corey Brune - Oct 2016 # This script starts or stops a VDB # requirements # pip install docopt delphixpy # The below doc follows the POSIX compliant standards and allows us to use -# this doc to also define our arguments for the script. +# this doc to also define our ARGUMENTS for the script. """List all VDBs or Start, stop, enable, disable a VDB Usage: - dx_operations_vdb.py (--vdb [--stop | --start | --enable | --disable] | --list | --all_dbs ) - [-d | --engine | --all] - [--force] [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] + dx_operations_vdb.py (--vdb [--stop | --start | --enable | \ + --disable] | --list | --all_dbs ) + [--engine ] + [--force --parallel --poll --single_thread ] + [--config ] [--logdir ] dx_operations_vdb.py -h | --help | -v | --version List all VDBs, start, stop, enable, disable a VDB @@ -30,11 +31,12 @@ --list List all databases from an engine --enable Enable the VDB --disable Disable the VDB - -d Identifier of Delphix engine in dxtools.conf. - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. + --engine Identifier of Delphix engine in dxtools.conf. + [default: default] + --single_thread Run as a single thread. False if running multiple + threads. + [default: True] --force Do not clean up target in VDB disable operations - --debug Enable debug logging --parallel Limit number of jobs to maxjob --poll The number of seconds to wait between job polls [default: 10] @@ -45,40 +47,30 @@ -h --help Show this screen. -v --version Show version. 
""" - -VERSION = 'v.0.3.018' - import sys +import time from os.path import basename -from time import sleep, time -import traceback -from docopt import docopt -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import source -from delphixpy.v1_8_0.web.capacity import consumer -from delphixpy.v1_8_0.web.vo import SourceDisableParameters -from delphixpy.v1_8_0.web.source import source +import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import source +from delphixpy.v1_10_2.web.capacity import consumer +from delphixpy.v1_10_2.web.vo import SourceDisableParameters +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_all_objects -from lib.GetReferences import find_obj_list -from lib.GetSession import GetSession +VERSION = "v.0.3.004" def dx_obj_operation(dlpx_obj, vdb_name, operation): """ Function to start, stop, enable or disable a VDB - :param dlpx_obj: Virtualization Engine session object :type dlpx_obj: lib.GetSession.GetSession :param vdb_name: Name of the object to stop/start/enable/disable @@ -86,354 +78,234 @@ def dx_obj_operation(dlpx_obj, vdb_name, operation): :param operation: enable or disable dSources and VDBs :type operation: str """ - - print_debug('Searching for {} reference.\n'.format(vdb_name)) - engine_name = dlpx_obj.dlpx_engines.keys()[0] - vdb_obj = find_obj_by_name(dlpx_obj.server_session, source, vdb_name) + engine_name = list(dlpx_obj.dlpx_ddps)[0] + vdb_obj = get_references.find_obj_by_name(dlpx_obj.server_session, source, vdb_name) try: if vdb_obj: - if operation == 'start': + if operation == "start": source.start(dlpx_obj.server_session, vdb_obj.reference) - elif operation == 'stop': + elif operation == "stop": source.stop(dlpx_obj.server_session, vdb_obj.reference) - elif operation == 'enable': + elif operation == "enable": source.enable(dlpx_obj.server_session, vdb_obj.reference) - elif operation == 'disable': - source.disable(dlpx_obj.server_session, - vdb_obj.reference) - elif operation == 'force_disable': + elif operation == "disable": + source.disable(dlpx_obj.server_session, vdb_obj.reference) + elif operation == "force_disable": disable_params = SourceDisableParameters() disable_params.attempt_cleanup = False - source.disable(dlpx_obj.server_session, - vdb_obj.reference, - disable_params) + source.disable( + dlpx_obj.server_session, vdb_obj.reference, disable_params + ) dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - except (RequestError, HttpError, JobError, AttributeError), e: - print_exception('An error occurred while performing {} on {}:\n' - '{}'.format(operation, vdb_name, e)) - print '{} was successfully performed on {}.'.format(operation, vdb_name) + except (exceptions.RequestError, exceptions.JobError, AttributeError) as err: + raise dlpx_exceptions.DlpxException( + f"An error occurred while performing {operation} on {vdb_obj}:\n" 
f"{err}" + ) + print(f"{operation} was successfully performed on {vdb_name}.") def all_databases(dlpx_obj, operation): """ Enable or disable all dSources and VDBs on an engine - :param dlpx_obj: Virtualization Engine session object :type dlpx_obj: lib.GetSession.GetSession :param operation: enable or disable dSources and VDBs :type operation: str """ - for db in database.get_all(dlpx_obj.server_session): try: dx_obj_operation(dlpx_obj, db.name, operation) - except (RequestError, HttpError, JobError): + except (exceptions.RequestError, exceptions.HttpError): pass - print '{} {}\n'.format(operation, db.name) - sleep(2) + time.sleep(1) def list_databases(dlpx_obj): """ Function to list all databases and stats for an engine - :param dlpx_obj: Virtualization Engine session object :type dlpx_obj: lib.GetSession.GetSession """ - - source_stats_lst = find_all_objects(dlpx_obj.server_session, source) - is_dSource = None + all_source_objs = source.get_all(dlpx_obj.server_session) + all_consumer_objs = consumer.get_all(dlpx_obj.server_session) + db_size = None + active_space = None + sync_space = None + log_space = None try: - for db_stats in find_all_objects(dlpx_obj.server_session, - consumer): - source_stats = find_obj_list(source_stats_lst, db_stats.name) + for db_stats in all_consumer_objs: + source_stats = get_references.find_obj_list(all_source_objs, db_stats.name) if source_stats is not None: - if source_stats.virtual is False: - db_size = source_stats.runtime.database_size/1024/1024/1024 - print('name: {}, provision container: dSource, disk usage: ' - '{:.2f}GB, Size of Snapshots: {:.2f}GB, ' - 'dSource Size: {:.2f}GB, Log Size: {:.2f}MB,' - 'Enabled: {}, Status: {}'.format(str(db_stats.name), - db_stats.breakdown.active_space/1024/1024/1024, - db_stats.breakdown.sync_space/1024/1024/1024, - source_stats.runtime.database_size/1024/1024/1024, - db_stats.breakdown.log_space/1024/1024, - source_stats.runtime.enabled, - source_stats.runtime.status)) - elif source_stats.virtual is True: - print('name: {}, provision container: {}, disk usage: ' - '{:.2f}GB, Size of Snapshots: {:.2f}GB, ' - 'Log Size: {:.2f}MB, Enabled: {}, ' - 'Status: {}'.format(str(db_stats.name), - db_stats.parent, - db_stats.breakdown.active_space/1024/1024/1024, - db_stats.breakdown.sync_space/1024/1024/1024, - db_stats.breakdown.log_space/1024/1024, - source_stats.runtime.enabled, - source_stats.runtime.status)) + active_space = db_stats.breakdown.active_space / 1024 / 1024 / 1024 + sync_space = db_stats.breakdown.sync_space / 1024 / 1024 / 1024 + log_space = db_stats.breakdown.log_space / 1024 / 1024 + db_size = source_stats.runtime.database_size / 1024 / 1024 / 1024 + if source_stats.virtual is False: + print( + f"name: {db_stats.name}, provision container:" + f" {db_stats.parent}, disk usage: {db_size:.2f}GB," + f"Size of Snapshots: {active_space:.2f}GB, " + f"dSource Size: {sync_space:.2f}GB, " + f"Log Size: {log_space:.2f}MB," + f"Enabled: {source_stats.runtime.enabled}," + f"Status: {source_stats.runtime.status}" + ) + elif source_stats.virtual is True: + print( + f"name: {db_stats.name}, provision container: " + f"{db_stats.parent}, disk usage: " + f"{active_space:.2f}GB, Size of Snapshots: " + f"{sync_space:.2f}GB" + f"Log Size: {log_space:.2f}MB, Enabled: " + f"{source_stats.runtime.enabled}, " + f"Status: {source_stats.runtime.status}" + ) elif source_stats is None: - print('name: {},provision container: {},database disk ' - 'usage: {:.2f} GB,Size of Snapshots: {:.2f} GB,' - 'Could not find source information. 
This could be a ' - 'result of an unlinked object'.format( - str(db_stats.name), str(db_stats.parent), - db_stats.breakdown.active_space / 1024 / 1024 / 1024, - db_stats.breakdown.sync_space / 1024 / 1024 / 1024)) - except (RequestError, JobError, AttributeError, DlpxException) as err: - print 'An error occurred while listing databases: {}'.format(err) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func + print( + f"name: {db_stats.name},provision container: " + f"{db_stats.parent}, database disk usage: " + f"{db_size:.2f}GB," + f"Size of Snapshots: {active_space:.2f}GB," + "Could not find source information. This could be a " + "result of an unlinked object" + ) + except ( + exceptions.RequestError, + AttributeError, + dlpx_exceptions.DlpxException, + ) as err: + print(f"An error occurred while listing databases: {err}") @run_async -def main_workflow(engine, dlpx_obj): +def main_workflow(engine, dlpx_obj, single_thread): """ This function is where we create our main workflow. Use the @run_async decorator to run this function asynchronously. The @run_async decorator allows us to run against multiple Delphix Engine simultaneously - :param engine: Dictionary of engines :type engine: dictionary - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool """ - try: - # Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('ERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: {basename(__file__)} encountered an error authenticating" + f' to {engine["hostname"]} {ARGUMENTS["--target"]}:\n{err}' + ) + thingstodo = ["thingstodo"] try: with dlpx_obj.job_mode(single_thread): - while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: - if len(thingstodo) > 0: - if arguments['--start']: - dx_obj_operation(dlpx_obj, arguments['--vdb'], 'start') - elif arguments['--stop']: - dx_obj_operation(dlpx_obj, arguments['--vdb'], 'stop') - elif arguments['--enable']: - dx_obj_operation(dlpx_obj, arguments['--vdb'], 'enable') - elif arguments['--disable']: - if arguments['--force']: + while dlpx_obj.jobs or thingstodo: + if thingstodo: + if ARGUMENTS["--start"]: + dx_obj_operation(dlpx_obj, ARGUMENTS["--vdb"], "start") + elif ARGUMENTS["--stop"]: + dx_obj_operation(dlpx_obj, ARGUMENTS["--vdb"], "stop") + elif 
ARGUMENTS["--enable"]: + dx_obj_operation(dlpx_obj, ARGUMENTS["--vdb"], "enable") + elif ARGUMENTS["--disable"]: + if ARGUMENTS["--force"]: dx_obj_operation( - dlpx_obj, arguments['--vdb'], 'force_disable') + dlpx_obj, ARGUMENTS["--vdb"], "force_disable" + ) else: - dx_obj_operation( - dlpx_obj, arguments['--vdb'], 'disable') - elif arguments['--list']: + dx_obj_operation(dlpx_obj, ARGUMENTS["--vdb"], "disable") + elif ARGUMENTS["--list"]: list_databases(dlpx_obj) - elif arguments['--all_dbs']: - all_databases(dlpx_obj, arguments['--all_dbs']) + elif ARGUMENTS["--all_dbs"]: + all_databases(dlpx_obj, ARGUMENTS["--all_dbs"]) thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - job_obj = job.get(dlpx_obj.server_session, dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Running JS Bookmark: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - except (DlpxException, RequestError, JobError, HttpError) as e: - print_exception('Error in js_bookmark: {}\n{}'.format( - engine['hostname'], e)) - sys.exit(1) - - -def time_elapsed(time_start): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - - :param time_start: start time of the script. - :type time_start: float - """ - return round((time() - time_start)/60, +1) + run_job.find_job_state(engine, dlpx_obj) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f'Error in {basename(__file__)}: {engine["ip_address"]}\n{err}' + ) -def run_job(dlpx_obj, config_file_path): +def main(): """ - This function runs the main_workflow aynchronously against all the - servers specified - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param config_file_path: string containing path to configuration file. - :type config_file_path: str + main function - creates session and runs jobs """ - - # Create an empty list to store threads we create. - threads = [] - engine = None - - # If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info('Executing against all Delphix Engines in the dxtools.conf') - try: - # For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - # Create a new thread and add it to the list. 
- threads.append(main_workflow(engine, dlpx_obj)) - except DlpxException as e: - print_exception('Error encountered in run_job():\n{}'.format(e)) - sys.exit(1) - - elif arguments['--all'] is False: - # Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dlpx_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - except (DlpxException, RequestError, KeyError): - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value and' - ' try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - else: - # Else search for a default engine in the dxtools.conf - for delphix_engine in dlpx_obj.dlpx_engines: - if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': - engine = dlpx_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine is None: - raise DlpxException('\nERROR: No default engine found. Exiting') - - # run the job against the engine - threads.append(main_workflow(engine, dlpx_obj)) - - # For each thread in the list... - for each in threads: - # join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def main(): - # We want to be able to call on these variables anywhere in the script. - global single_thread - global debug - - time_start = time() - single_thread = False - + time_start = time.time() try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - config_file_path = arguments['--config'] - # Parse the dxtools.conf and put it into a dictionary + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] dx_session_obj.get_config(config_file_path) - # This is the function that will handle processing main_workflow for # all the servers. - run_job(dx_session_obj, config_file_path) - - elapsed_minutes = time_elapsed(time_start) - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - + for each in run_job.run_job( + main_workflow, dx_session_obj, engine, single_thread + ): + # join them back together so that we wait for all threads to + # complete + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"script took {elapsed_minutes} minutes to " f"get this far." + ) # Here we handle what we do when the unexpected happens - except SystemExit as e: + except SystemExit as err: # This is what we use to handle our sys.exit(#) - sys.exit(e) - - except DlpxException as e: - # We use this exception handler when an error occurs in a function call. - print_exception('ERROR: Please check the ERROR message below:\n' - '{}'.format(e.message)) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call. + dx_logging.print_exception( + f"ERROR: Please check the ERROR message " f"below:\n {err.error}" + ) sys.exit(2) - except HttpError as e: + except exceptions.HttpError as err: # We use this exception handler when our connection to Delphix fails - print_exception('ERROR: Connection failed to the Delphix Engine. 
Please' - 'check the ERROR message below:\n{}'.format(e.message)) + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) sys.exit(2) - except JobError as e: + except exceptions.JobError as err: # We use this exception handler when a job fails in Delphix so that we # have actionable data - print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) - elapsed_minutes = time_elapsed(time_start) - print_exception('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." + f"{basename(__file__)} took {elapsed_minutes} minutes to get " + f"this far" + ) sys.exit(3) except KeyboardInterrupt: # We use this exception handler to gracefully handle ctrl+c exits - print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed(time_start) - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - except: - # Everything else gets caught here - print_exception('{}\n{}'.format(sys.exc_info()[0], - traceback.format_exc())) - elapsed_minutes = time_elapsed(time_start) - print_info("{} took {:.2f} minutes to get this far".format( - basename(__file__), elapsed_minutes)) - sys.exit(1) + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to get this far." + ) if __name__ == "__main__": - # Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - # Feed our arguments to the main function, and off we go! + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! main() diff --git a/dx_provision_dsource.py b/dx_provision_dsource.py index e3bb304..f510851 100755 --- a/dx_provision_dsource.py +++ b/dx_provision_dsource.py @@ -1,628 +1,309 @@ -#!/usr/bin/env python -# Corey Brune - Feb 2017 -#Description: -# Create and sync a dSource -# -#Requirements -#pip install docopt delphixpy +#!/usr/bin/env python3 +# Requirements +# pip install docopt delphixpy + +# The below doc follows the POSIX compliant standards and allows us to use +# this doc to also define our ARGUMENTS for the script. -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. 
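The refactored dx_operations.py above keeps the same execution shape as the older scripts: a per-engine main_workflow is decorated with run_async (imported from lib.run_async) so each engine runs in its own thread, and the caller joins the returned Thread objects before reporting elapsed time. A minimal sketch of that decorator-and-join pattern on its own, with invented engine entries and a placeholder body in place of the real Delphix session work:

import threading
from functools import wraps


def run_async(func):
    # Same idea as the run_async decorator shown above: run the wrapped
    # function in its own thread and hand back the Thread object.
    @wraps(func)
    def async_func(*args, **kwargs):
        thread = threading.Thread(target=func, args=args, kwargs=kwargs)
        thread.start()
        return thread
    return async_func


@run_async
def main_workflow(engine):
    # Placeholder for the real per-engine work (connect, run jobs, poll).
    print(f"working against {engine['hostname']}")


if __name__ == "__main__":
    engines = [{"hostname": "myengine1"}, {"hostname": "myengine2"}]
    threads = [main_workflow(engine) for engine in engines]
    for each in threads:
        # Join so the script waits for every engine's workflow to finish.
        each.join()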
"""Create and sync a dSource Usage: - dx_provision_dsource.py (--type ) - dx_provision_dsource.py --type --dsource_name --ip_addr --db_name --env_name --db_install_path --dx_group --db_passwd --db_user [--port_num ][--num_connections ][--link_now ][--files_per_set ][--rman_channels ] - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_provision_dsource.py --type --dsource_name --ase_user --ase_passwd --backup_path --source_user --stage_user aseadmin --stage_repo ASE1570_S2 --src_config --env_name --dx_group [--bck_file ][--create_bckup] - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_provision_dsource.py --type --dsource_name --dx_group --db_passwd --db_user --stage_instance --stage_env --backup_path [--backup_loc_passwd --backup_loc_user --logsync [--sync_mode ] --load_from_backup] - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_provision_dsource.py -h | --help | -v | --version + dx_provision_dsource.py --type --dsource_name --ip_addr --env_name \ + --envinst --dx_group --db_passwd --db_user \ + [--logsync --logsync_mode ] + [--port_num --num_connections --link_now ] + [--files_per_set --rman_channels ] + [--engine ] + [--poll --config --logdir ] + dx_provision_dsource.py --type --dsource_name --ase_user --ase_passwd \ + --backup_path --source_user --stage_user aseadmin --stage_repo --src_config \ + --env_name --dx_group + [--bck_file --create_bckup] + [--engine ] + [--parallel --poll --config --logdir ] + dx_provision_dsource.py --type --dsource_name --dx_group --db_passwd \ + --db_user --stage_instance --stage_env --backup_path --env_name \ + --envinst + [--backup_loc_passwd --backup_loc_user --logsync] + [--val_sync_mode --delphix_managed --init_load_type --backup_uuid --single_thread ] + [--engine ] + [--parallel --poll --config --logdir ] + dx_provision_dsource.py -h | --help | -v | --version Create and sync a dSource Examples: Oracle: - dx_provision_dsource.py --type oracle --dsource_name oradb1 --ip_addr 192.168.166.11 --db_name srcDB1 --env_name SourceEnv --db_install_path /u01/app/oracle/product/11.2.0.4/dbhome_1 --db_user delphixdb --db_passwd delphixdb - + dx_provision_dsource.py --type oracle --dsource_name orasrc1 \ + --ip_addr 10.0.1.20 --env_name orasrc \ + --envinst /u01/app/oracle/product/12.2.0/dbhome_1 \ + --db_user delphixdb --dx_group Production --db_passwd delphixdb Sybase: - dx_provision_dsource.py --type sybase --dsource_name dbw1 --ase_user sa --ase_passwd sybase --backup_path /data/db --source_user aseadmin --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw1 --env_name aseSource --dx_group Sources - + dx_provision_dsource.py --type sybase --dsource_name dbw1 --ase_user sa \ + --ase_passwd sybase --backup_path /data/db --source_user aseadmin \ + --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw1 \ + --env_name aseSource --dx_group Sources --single_thread False Specify backup files: - dx_provision_dsource.py --type sybase --dsource_name dbw2 --ase_user sa --ase_passwd sybase --backup_path /data/db --source_user aseadmin --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw2 --env_name aseSource --dx_group Sources --bck_file "dbw2data.dat" - + dx_provision_dsource.py --type sybase --dsource_name dbw2 --ase_user sa \ + --ase_passwd sybase --backup_path /data/db --source_user aseadmin \ + --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw2 \ + --env_name aseSource --dx_group Sources --bck_file "dbw2data.dat" 
Create a new backup and ingest: - dx_provision_dsource.py --type sybase --dsource_name dbw2 --ase_user sa --ase_passwd sybase --backup_path /data/db --source_user aseadmin --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw2 --env_name aseSource --dx_group Sources --create_bckup - + dx_provision_dsource.py --type sybase --dsource_name dbw2 --ase_user sa \ + --ase_passwd sybase --backup_path /data/db --source_user aseadmin \ + --stage_user aseadmin --stage_repo ASE1570_S2 --src_config dbw2 \ + --env_name aseSource --dx_group Sources --create_bckup MSSQL: - dx_provision_dsource.py --type mssql --dsource_name mssql_dsource --dx_group Sources --db_passwd delphix --db_user sa --stage_env mssql_target_svr --stage_instance MSSQLSERVER --backup_path \\bckserver\path\backups --backup_loc_passwd delphix --backup_loc_user delphix - dx_provision_dsource.py --type mssql --dsource_name AdventureWorks2014 --dx_group "9 - Sources" --db_passwd delphixdb --db_user aw --stage_env WINDOWSTARGET --stage_instance MSSQLSERVER --logsync --backup_path auto --load_from_backup - + dx_provision_dsource.py --type mssql --dsource_name suitecrm --dx_group Production \ + --db_passwd delphix --db_user delphix --env_name winsrc --stage_env wintgt + --stage_instance MSSQLSERVER --backup_path "\\10.0.1.50\backups" + --init_load_type SPECIFIC --backup_uuid A5919604-A263-4DA3-9204-23D9868ABC99 + --engine myve2 --envinst "c:\Program Files\Microsoft SQL Server\130" + dx_provision_dsource.py --type mssql --dsource_name suitecrm --dx_group Production \ + --db_passwd delphix --db_user delphix --env_name winsrc --stage_env wintgt + --stage_instance MSSQLSERVER --backup_path "\\10.0.1.50\backups" --delphix_managed True + --init_load_type COPY_ONLY --backup_uuid A5919604-A263-4DA3-9204-23D9868ABC99 + --engine myve2 --envinst "c:\Program Files\Microsoft SQL Server\130" Options: --type dSource type. mssql, sybase or oracle + [default: oracle] --ip_addr IP Address of the dSource - --db_name Name of the dSource DB + [default: None] --env_name Name of the environment where the dSource installed - --db_install_path Location of the installation path of the DB. + --dx_group Group where the dSource will reside + --envinst Location of the installation path of the DB. --num_connections Number of connections for Oracle RMAN [default: 5] + --logsync Enable logsync + [default: True] + --logsync_mode Logsync mode + [default: UNDEFINED] + --single_thread Run as a single thread. False if running multiple + threads. + [default: True] --link_now Link the dSource [default: True] --files_per_set Configures how many files per set for Oracle RMAN [default: 5] --rman_channels Configures the number of Oracle RMAN Channels [default: 2] - --dx_group Group where the dSource will reside --create_bckup Create and ingest a new Sybase backup --db_user Username of the dSource DB --db_passwd Password of the db_user --bck_file Fully qualified name of backup file - --port_num Port number of the listener. Default: 1521 + --port_num Port number of the listener. 
+ [default: 1521] --src_config Name of the configuration environment --ase_passwd ASE DB password --ase_user ASE username --backup_path Path to the ASE/MSSQL backups - --sync_mode MSSQL validated sync mode - [TRANSACTION_LOG|FULL_OR_DIFFERENTIAL|FULL|NONE] + --val_sync_mode MSSQL validated sync mode + TRANSACTION_LOG|FULL_OR_DIFFERENTIAL|FULL|NONE + [default: FULL] --source_user Environment username + [default: delphix] --stage_user Stage username + [default: delphix] --stage_repo Stage repository --stage_instance Name of the PPT instance --stage_env Name of the PPT server - --logsync Enable logsync - --backup_loc_passwd Password of the shared backup path (--bckup_path) - --backup_loc_user User of the shared backup path (--bckup_path) - --load_from_backup If set, Delphix will try to load the most recent full backup (MSSQL only) + --backup_loc_passwd Password of the shared backup path + --backup_loc_user User of the shared backup path + --delphix_managed Delphix Managed Backups ( MSSQL) + [default: False ] + --init_load_type Type of backup to create the dSource from + RECENT|SPECIFIC|COPY_ONLY (MSSQL only) + [default: RECENT] + --backup_uuid If init_load_type is SPECIFIC, provide the + backupset uuid (MSSQL only) --dsource_name Name of the dSource --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob + [default: default] --poll The number of seconds to wait between job polls [default: 10] --config The path to the dxtools.conf file - [default: ./dxtools.conf] + [default: ./config/dxtools.conf] --logdir The path to the logfile you want to use. - [default: ./dx_provision_dsource.log] + [default: ./logs/dx_provision_dsource.log] -h --help Show this screen. -v --version Show version. 
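Because the script feeds this docstring straight to docopt, the Usage and Options blocks above are also the argument parser. As a minimal illustration (the values shown are hypothetical, taken from the Oracle example rather than a real engine), the parsed ARGUMENTS dictionary the rest of the script reads looks roughly like this:

    import docopt

    # docopt returns a plain dict keyed by option name; [default: ...] values
    # from the Options block are filled in when a flag is not supplied.
    ARGUMENTS = docopt.docopt(__doc__, version="dx_provision_dsource.py v.0.3.004")
    # For the Oracle example above, ARGUMENTS would contain entries such as:
    # {"--type": "oracle",                      # [default: oracle]
    #  "--dsource_name": "orasrc1",
    #  "--db_user": "delphixdb",
    #  "--port_num": "1521",                    # docopt option values are strings
    #  "--config": "./config/dxtools.conf",
    #  "--logdir": "./logs/dx_provision_dsource.log"}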
""" -VERSION = 'v.0.2.0018' - import sys +import time from os.path import basename -from time import sleep, time -from docopt import docopt, DocoptExit - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import sourceconfig -from delphixpy.v1_8_0.web import group -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import environment -from delphixpy.v1_8_0.web import repository -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web.vo import OracleSIConfig -from delphixpy.v1_8_0.web.vo import OracleInstance -from delphixpy.v1_8_0.web.vo import LinkParameters -from delphixpy.v1_8_0.web.vo import OracleLinkData -from delphixpy.v1_8_0.web.vo import OracleSourcingPolicy -from delphixpy.v1_8_0.web.vo import ASELinkData -from delphixpy.v1_8_0.web.vo import ASELatestBackupSyncParameters -from delphixpy.v1_8_0.web.vo import ASENewBackupSyncParameters -from delphixpy.v1_8_0.web.vo import ASESpecificBackupSyncParameters -from delphixpy.v1_8_0.web.vo import MSSqlLinkData -from delphixpy.v1_8_0.web.vo import SourcingPolicy - -from lib.DlpxException import DlpxException -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_dbrepo -from lib.GetReferences import get_running_job -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetSession import GetSession - - -def create_ora_sourceconfig(engine_name, port_num=1521): - """ - :param ip_addr: - :param db_name: - :return: - """ - create_ret = None - env_obj = find_obj_by_name(dx_session_obj.server_session, environment, - arguments['--env_name']) - - try: - sourceconfig_ref = find_obj_by_name(dx_session_obj.server_session, - sourceconfig, - arguments['--db_name']).reference - except DlpxException: - sourceconfig_ref = None - - repo_ref = find_dbrepo(dx_session_obj.server_session, - 'OracleInstall', env_obj.reference, - arguments['--db_install_path']).reference - - dsource_params = OracleSIConfig() - - connect_str = ('jdbc:oracle:thin:@' + arguments['--ip_addr'] + ':' + - str(port_num) + ':' + arguments['--db_name']) - - dsource_params.database_name = arguments['--db_name'] - dsource_params.unique_name = arguments['--db_name'] - dsource_params.repository = repo_ref - dsource_params.instance = OracleInstance() - dsource_params.instance.instance_name = arguments['--db_name'] - dsource_params.instance.instance_number = 1 - dsource_params.services = [{'type': 'OracleService', - 'jdbcConnectionString': connect_str}] - - try: - if sourceconfig_ref is None: - create_ret = link_ora_dsource(sourceconfig.create( - dx_session_obj.server_session, dsource_params), - env_obj.primary_user) - elif sourceconfig_ref is not None: - create_ret = link_ora_dsource(sourceconfig_ref, - env_obj.primary_user) - - print_info('Created and linked the dSource {} with reference {}.\n'.format( - arguments['--db_name'], create_ret)) - link_job_ref = dx_session_obj.server_session.last_job - link_job_obj = job.get(dx_session_obj.server_session, - link_job_ref) - while link_job_obj.job_state not in ["CANCELED", "COMPLETED", "FAILED"]: - print_info('Waiting three seconds for link job to complete, and sync to begin') - sleep(3) - link_job_obj = job.get(dx_session_obj.server_session, - link_job_ref) - #Add the snapsync job to the jobs dictionary - dx_session_obj.jobs[engine_name + 'snap'] = 
get_running_job( - dx_session_obj.server_session, find_obj_by_name( - dx_session_obj.server_session, database, - arguments['--dsource_name']).reference) - print_debug('Snapshot Job Reference: {}.\n'.format( - dx_session_obj.jobs[engine_name + 'snap'])) - except (HttpError, RequestError) as e: - print_exception('ERROR: Could not create the sourceconfig:\n' - '{}'.format(e)) - sys.exit(1) +import docopt +from delphixpy.v1_10_2 import exceptions +from lib import dlpx_exceptions +from lib import dsource_link_ase +from lib import dsource_link_mssql +from lib import dsource_link_oracle +from lib import dx_logging +from lib import get_session +from lib import run_job +from lib.run_async import run_async -def link_ora_dsource(srcconfig_ref, primary_user_ref): - """ - :param srcconfig_ref: Reference to the sourceconfig object - :param primary_user_ref: Reference to the environment user - :return: Reference of the linked dSource - """ - - link_params = LinkParameters() - link_params.link_data = OracleLinkData() - link_params.link_data.sourcing_policy = OracleSourcingPolicy() - link_params.name = arguments['--dsource_name'] - link_params.group = find_obj_by_name(dx_session_obj.server_session, group, - arguments['--dx_group']).reference - link_params.link_data.compressedLinkingEnabled = True - link_params.link_data.environment_user = primary_user_ref - link_params.link_data.db_user = arguments['--db_user'] - link_params.link_data.number_of_connections = \ - int(arguments['--num_connections']) - link_params.link_data.link_now = bool(arguments['--link_now']) - link_params.link_data.files_per_set = int(arguments['--files_per_set']) - link_params.link_data.rman_channels = int(arguments['--rman_channels']) - link_params.link_data.db_credentials = {'type': 'PasswordCredential', - 'password': - arguments['--db_passwd']} - link_params.link_data.sourcing_policy.logsync_enabled = True - #link_params.link_data.sourcing_policy.logsync_mode = 'ARCHIVE_REDO_MODE' - link_params.link_data.config = srcconfig_ref - try: - return database.link(dx_session_obj.server_session, link_params) - except (RequestError, HttpError) as e: - print_exception('Database link failed for {}:\n{}\n'.format( - arguments['--dsource_name'], e)) - sys.exit(1) - - -def link_mssql_dsource(engine_name): - """ - Link an MSSQL dSource - """ - link_params = LinkParameters() - link_params.name = arguments['--dsource_name'] - link_params.link_data = MSSqlLinkData() - - try: - env_obj_ref = find_obj_by_name(dx_session_obj.server_session, - environment, - arguments['--stage_env']).reference - - link_params.link_data.ppt_repository = find_dbrepo( - dx_session_obj.server_session, 'MSSqlInstance', env_obj_ref, - arguments['--stage_instance']).reference - link_params.link_data.config = find_obj_by_name( - dx_session_obj.server_session, sourceconfig, - arguments['--dsource_name']).reference - link_params.group = find_obj_by_name(dx_session_obj.server_session, - group, - arguments['--dx_group']).reference - - except DlpxException as e: - print_exception('Could not link {}: {}\n'.format( - arguments['--dsource_name'], e)) - sys.exit(1) - - if arguments['--backup_path'] != "auto": - link_params.link_data.shared_backup_location = arguments['--backup_path'] - - if arguments['--backup_loc_passwd']: - link_params.link_data.backup_location_credentials = {'type': - 'PasswordCredential', - 'password': - arguments['--backup_loc_passwd']} - link_params.link_data.backup_location_user = \ - arguments['--backup_loc_user'] - - link_params.link_data.db_credentials = 
{'type': 'PasswordCredential', - 'password': - arguments['--db_passwd']} - link_params.link_data.db_user = arguments['--db_user'] - - link_params.link_data.sourcing_policy = SourcingPolicy() - - if arguments['--load_from_backup']: - link_params.link_data.sourcing_policy.load_from_backup = True - - if arguments['--sync_mode']: - link_params.link_data.validated_sync_mode = arguments['sync_mode'] - - if arguments['--logsync']: - link_params.link_data.sourcing_policy.logsync_enabled = True - - try: - database.link(dx_session_obj.server_session, link_params) - dx_session_obj.jobs[engine_name] = dx_session_obj.server_session.last_job - dx_session_obj.jobs[engine_name + 'snap'] = get_running_job( - dx_session_obj.server_session, find_obj_by_name( - dx_session_obj.server_session, database, - arguments['--dsource_name']).reference) - - except (HttpError, RequestError, JobError) as e: - print_exception('Database link failed for {}:\n{}\n'.format( - arguments['--dsource_name'], e)) - - -def link_ase_dsource(engine_name): - """ - Link an ASE dSource - """ - - link_params = LinkParameters() - link_params.name = arguments['--dsource_name'] - link_params.link_data = ASELinkData() - link_params.link_data.db_credentials = {'type': 'PasswordCredential', - 'password': - arguments['--ase_passwd']} - link_params.link_data.db_user = arguments['--ase_user'] - link_params.link_data.load_backup_path = arguments['--backup_path'] - - if arguments['--bck_file']: - link_params.link_data.sync_parameters = \ - ASESpecificBackupSyncParameters() - bck_files = (arguments['--bck_file']).split(' ') - link_params.link_data.sync_parameters.backup_files = bck_files - - elif arguments['--create_bckup']: - link_params.link_data.sync_parameters = ASENewBackupSyncParameters() - - else: - link_params.link_data.sync_parameters = ASELatestBackupSyncParameters() - - try: - link_params.group = find_obj_by_name( - dx_session_obj.server_session, group, - arguments['--dx_group']).reference - env_user_ref = link_params.link_data.stage_user = find_obj_by_name( - dx_session_obj.server_session, environment, - arguments['--env_name']).primary_user - link_params.link_data.staging_host_user = env_user_ref - link_params.link_data.source_host_user = env_user_ref - - link_params.link_data.config = find_obj_by_name( - dx_session_obj.server_session, sourceconfig, - arguments['--src_config']).reference - link_params.link_data.staging_repository = find_obj_by_name( - dx_session_obj.server_session, repository, - arguments['--stage_repo']).reference - - except DlpxException as e: - print_exception('Could not link {}: {}\n'.format( - arguments['--dsource_name'], e)) - sys.exit(1) - - try: - dsource_ref = database.link(dx_session_obj.server_session, link_params) - dx_session_obj.jobs[engine_name] = dx_session_obj.server_session.last_job - dx_session_obj.jobs[engine_name + 'snap'] = get_running_job( - dx_session_obj.server_session, find_obj_by_name( - dx_session_obj.server_session, database, - arguments['--dsource_name']).reference) - print '{} sucessfully linked {}'.format(dsource_ref, - arguments['--dsource_name']) - except (RequestError, HttpError) as e: - print_exception('Database link failed for {}:\n{}'.format( - arguments['--dsource_name'], e)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). 
- Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func +VERSION = "v.0.3.004" @run_async -def main_workflow(engine): +def main_workflow(engine, dlpx_obj, single_thread): """ - This function actually runs the jobs. + This function is where we create our main workflow. Use the @run_async decorator to run this function asynchronously. - This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool """ - jobs = {} - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format( - dx_session_obj.dlpx_engines['hostname'], - arguments['--target'], e)) - sys.exit(1) - thingstodo = ["thingtodo"] + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], engine["username"], engine["password"] + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: {basename(__file__)} encountered an error authenticating" + f' to {engine["hostname"]} {ARGUMENTS["--target"]}:\n{err}' + ) + thingstodo = ["thingstodo"] try: - with dx_session_obj.job_mode(single_thread): - while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): - if len(thingstodo) > 0: - if arguments['--type'].lower() == 'oracle': - create_ora_sourceconfig(engine["hostname"]) - elif arguments['--type'].lower() == 'sybase': - link_ase_dsource(engine["hostname"]) - elif arguments['--type'].lower() == 'mssql': - link_mssql_dsource(engine["hostname"]) - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dx_session_obj.jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, - dx_session_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Provisioning dSource: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the - # running jobs list. - del dx_session_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. 
- if len(dx_session_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - except (HttpError, RequestError, JobError, DlpxException) as e: - print_exception('ERROR: Could not complete ingesting the source ' - 'data:\n{}'.format(e)) - sys.exit(1) - - - -def run_job(): + with dlpx_obj.job_mode(single_thread): + if ARGUMENTS["--type"].lower() == "oracle": + linked_ora = dsource_link_oracle.DsourceLinkOracle( + dlpx_obj, + ARGUMENTS["--dsource_name"], + ARGUMENTS["--db_passwd"], + ARGUMENTS["--db_user"], + ARGUMENTS["--dx_group"], + ARGUMENTS["--logsync"], + ARGUMENTS["--logsync_mode"], + ARGUMENTS["--type"], + ) + linked_ora.get_or_create_ora_sourcecfg( + ARGUMENTS["--env_name"], + ARGUMENTS["--envinst"], + ARGUMENTS["--ip_addr"], + ARGUMENTS["--port_num"], + ) + elif ARGUMENTS["--type"].lower() == "sybase": + ase_obj = dsource_link_ase.DsourceLinkASE( + dlpx_obj, + ARGUMENTS["--dsource_name"], + ARGUMENTS["--db_passwd"], + ARGUMENTS["--db_user"], + ARGUMENTS["--dx_group"], + ARGUMENTS["--logsync"], + ARGUMENTS["--type"], + ) + ase_obj.link_ase_dsource( + ARGUMENTS["--backup_path"], + ARGUMENTS["--bck_file"], + ARGUMENTS["--create_bckup"], + ARGUMENTS["--env_name"], + ARGUMENTS["--stage_repo"], + ) + elif ARGUMENTS["--type"].lower() == "mssql": + mssql_obj = dsource_link_mssql.DsourceLinkMssql( + dlpx_obj, + ARGUMENTS["--dsource_name"], + ARGUMENTS["--db_passwd"], + ARGUMENTS["--db_user"], + ARGUMENTS["--dx_group"], + ARGUMENTS["--type"], + ARGUMENTS["--logsync"], + ARGUMENTS["--val_sync_mode"], + ARGUMENTS["--init_load_type"], + ARGUMENTS["--delphix_managed"], + ) + mssql_obj.get_or_create_mssql_sourcecfg( + ARGUMENTS["--env_name"], + ARGUMENTS["--envinst"], + ARGUMENTS["--stage_env"], + ARGUMENTS["--stage_instance"], + ARGUMENTS["--backup_path"], + ARGUMENTS["--backup_loc_passwd"], + ARGUMENTS["--backup_loc_user"], + ARGUMENTS["--ip_addr"], + ARGUMENTS["--port_num"], + ARGUMENTS["--backup_uuid"], + ) + run_job.track_running_jobs(engine, dlpx_obj) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f'ERROR: {basename(__file__)}: {engine["ip_address"]}\n{err}' + ) + + +def main(): """ - This function runs the main_workflow aynchronously against all the servers - specified + main function - creates session and runs jobs """ - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n%s' % (e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - (arguments['--engine']))) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. 
Exiting.\n'.format( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: %s' % ( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - - -def main(argv): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global database_name - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - + time_start = time.time() try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - engine = None - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for + # This is the function that will handle processing main_workflow for # all the servers. - run_job() - - elapsed_minutes = time_elapsed() - print_info('script took {} minutes to get this far.'.format( - str(elapsed_minutes))) - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message below:\n{}'.format(e)) - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('{} took {:.2f} minutes to get this far:\n{}\n'.format( - basename(__file__), elapsed_minutes, e)) + for each in run_job.run_job_mt( + main_workflow, dx_session_obj, engine, single_thread + ): + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info(f"script took {elapsed_minutes} minutes to complete") + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call. 
+ dx_logging.print_exception(f"ERROR: {err.error}") + sys.exit(2) + + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. " f"Message: {err.status}" + ) + sys.exit(2) + + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}. " + f"{basename(__file__)} took {elapsed_minutes} minutes to complete." + ) sys.exit(3) except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to complete." + ) - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) if __name__ == "__main__": - - try: - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) - - except DocoptExit as e: - #print 'Exited because options were not specified: {}\n'.format(e) - print (e.message) - + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! + main() diff --git a/dx_provision_vdb.py b/dx_provision_vdb.py index dcdfe71..c5365f8 100755 --- a/dx_provision_vdb.py +++ b/dx_provision_vdb.py @@ -1,66 +1,70 @@ -#!/usr/bin/env python -#Adam Bowen - Apr 2016 -#This script provisions a vdb or dSource +#!/usr/bin/env python3 +# Adam Bowen - Apr 2016 +# This script provisions a vdb or dSource # Updated by Corey Brune Aug 2016 # --- Create vFiles VDB -#requirements -#pip install docopt delphixpy +# requirements +# pip install docopt delphixpy -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. - -#TODO: -# Refactor provisioning functions -# Documentation +# The below doc follows the POSIX compliant standards and allows us to use +# this doc to also define our ARGUMENTS for the script.
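Both provisioning scripts now import the @run_async decorator from lib/run_async.py instead of defining it inline. Judging from the inline version removed above (the ActiveState threading recipe), the shared helper presumably looks something like the sketch below; only the import path comes from the diff, the module body is an assumption:

    # Sketch of lib/run_async.py, assumed to mirror the inline decorator it replaces.
    from functools import wraps
    from threading import Thread


    def run_async(func):
        """Run func in a separate thread and return the Thread object, so that
        main() can join() the threads handed back by run_job.run_job_mt()."""

        @wraps(func)
        def async_func(*args, **kwargs):
            # Start the wrapped function in its own thread and hand the thread
            # back to the caller instead of a return value.
            func_hl = Thread(target=func, args=args, kwargs=kwargs)
            func_hl.start()
            return func_hl

        return async_func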
"""Provision VDB's Usage: - dx_provision_db.py --source --target_grp --target - (--db | --vfiles_path ) [--no_truncate_log] - (--environment --type ) [ --envinst ] - [--template ] [--mapfile ] - [--timestamp_type ] [--timestamp ] - [--timeflow ] - [--instname ] [--mntpoint ] [--noopen] - [--uniqname ][--source_grp ] - [--engine | --all] - [--vdb_restart ] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - [--postrefresh ] [--prerefresh ] - [--configure-clone ] - [--prerollback ] [--postrollback ] - dx_provision_db.py -h | --help | -v | --version + dx_provision_db.py --source --target_grp --db \ + --env_name --type + [--envinst --timestamp_type ] + [--timestamp --timeflow --no_truncate_log] + [--instname --mntpoint --uniqname --engine ] + [--single_thread --vdb_restart --parallel --poll ] + [--config --logdir --postrefresh ] + [--prerefresh --configure-clone --prerollback ] + [--postrollback --logsync] + dx_provision_db.py --help | --version Provision VDB from a defined source on the defined target environment. Examples: - dx_provision_vdb.py --engine landsharkengine --source_grp Sources --source "ASE pubs3 DB" --db vase --target testASE --target_grp Analytics --environment LINUXTARGET --type ase --envinst "LINUXTARGET" - - dx_provision_vdb.py --source_grp Sources --source "Employee Oracle 11G DB" --instname autod --uniqname autoprod --db autoprod --target autoprod --target_grp Analytics --environment LINUXTARGET --type oracle --envinst "/u01/app/oracle/product/11.2.0/dbhome_1" - - dx_provision_vdb.py --source_grp Sources --source "AdventureWorksLT2008R2" --db vAW --target testAW --target_grp Analytics --environment WINDOWSTARGET --type mssql --envinst MSSQLSERVER --all - - dx_provision_vdb.py --source UF_Source --target appDataVDB --target_grp Untitled --environment LinuxTarget --type vfiles --vfiles_path /mnt/provision/appDataVDB --prerollback "/u01/app/oracle/product/scripts/PreRollback.sh" --postrollback "/u01/app/oracle/product/scripts/PostRollback.sh" --vdb_restart true + dx_provision_vdb.py --engine landsharkengine --logsync \ + --source "ASE pubs3 DB" --db vase --target_grp Analytics \ + --env_name LINUXTARGET --type ase --envinst "LINUXTARGET" + + dx_provision_vdb.py --source "Employee Oracle 11G DB" \ + --instname autod --uniqname autoprod --db autoprod --target autoprod \ + --target_grp Analytics --env_name LINUXTARGET --type oracle \ + --envinst "/u01/app/oracle/product/11.2.0/dbhome_1" + + dx_provision_vdb.py --source "AdventureWorksLT2008R2" \ + --db vAW --target_grp Analytics --env_name WINDOWSTARGET \ + --type mssql --envinst MSSQLSERVER --all + + dx_provision_vdb.py --source UF_Source --db appDataVDB \ + --target_grp Untitled --env_name LinuxTarget --type vfiles \ + --mntpoint /mnt/provision/appDataVDB \ + --prerollback "/u01/app/oracle/product/scripts/PreRollback.sh" \ + --postrollback "/u01/app/oracle/product/scripts/PostRollback.sh" \ + --vdb_restart true + + dx_provision_vdb.py --source sPDB1 --db vPDB1 --target_grp dev \ + --env_name LinuxTarget --type oramt \ + --envinst /u01/app/oracle/product/18.0.0.0/dbhome_1 Options: - --source_grp The group where the source resides. - --source Name of the source object + --source Name of the source object --target_grp The group into which Delphix will place the VDB. 
- --target The unique name that you want to call this object - in Delphix --db The name you want to give the database (Oracle Only) - --vfiles_path The full path on the Target server where Delphix - will provision the vFiles --no_truncate_log Don't truncate log on checkpoint (ASE only) - --environment The name of the Target environment in Delphix + --env_name The name of the Target environment in Delphix --type The type of VDB this is. - oracle | mssql | ase | vfiles - --prerefresh Pre-Hook commands - --postrefresh Post-Hook commands - --prerollback Post-Hook commands - --postrollback Post-Hook commands + oracle | oramt | mssql | ase | vfiles + --logsync Enable logsync + --prerefresh Pre-Hook commands before a refresh + --postrefresh Post-Hook commands after a refresh + --prerollback Pre-Hook commands before a rollback + --postrollback Post-Hook commands after a rollback --configure-clone Configure Clone commands - --vdb_restart Either True or False. Default: False + --vdb_restart Automatically start VDBs after a host reboot. + Either True or False. + [default: True] --envinst The identifier of the instance in Delphix. ex. "/u01/app/oracle/product/11.2.0/dbhome_1" ex. LINUXTARGET @@ -77,1064 +81,704 @@ snapshot name: "@YYYY-MM-DDTHH24:MI:SS.ZZZ" snapshot time from GUI: "YYYY-MM-DD HH24:MI" [default: LATEST] - --template Target VDB Template name (Oracle Only) - --mapfile Target VDB mapping file (Oracle Only) --instname Target VDB SID name (Oracle Only) --uniqname Target VDB db_unique_name (Oracle Only) --mntpoint Mount point for the VDB [default: /mnt/provision] - --noopen Don't open database after provision (Oracle Only) - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging + --engine Identifier of Delphix engine in dxtools.conf. + [default: default] --parallel Limit number of jobs to maxjob --poll The number of seconds to wait between job polls - [default: 10] + [default: 5] --config The path to the dxtools.conf file - [default: ./dxtools.conf] + [default: ./config/dxtools.conf] --logdir The path to the logfile you want to use. - [default: ./dx_provision_vdb.log] + [default: ./logs/dx_provision_vdb.log] -h --help Show this screen. -v --version Show version. 
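The --timestamp_type/--timestamp pair is eventually turned into a Delphix timeflow point by lib.dx_timeflow before provisioning. As a rough illustration of the two shapes the provisioning functions below hand to database.provision() (the container and timeflow references here are hypothetical):

    from delphixpy.v1_10_2.web import vo

    # Default (LATEST): a semantic point on the source container.
    latest_point = vo.TimeflowPointSemantic()
    latest_point.container = "ORACLE_DB_CONTAINER-13"   # hypothetical reference
    latest_point.location = "LATEST_POINT"

    # SNAPSHOT or TIME: an explicit timeflow plus timestamp, as resolved by
    # dx_timeflow.DxTimeflow.set_timeflow_point() from the values given above.
    pinned_point = vo.TimeflowPointTimestamp()
    pinned_point.timeflow = "ORACLE_TIMEFLOW-42"        # hypothetical reference
    pinned_point.timestamp = "2016-04-01T10:30:00.000Z"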
""" - -VERSION = 'v.0.2.305' - -import signal import sys import time -import traceback -import re -from docopt import docopt from os.path import basename -from time import sleep, time - -from delphixpy.v1_8_0.delphix_engine import DelphixEngine -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import environment -from delphixpy.v1_8_0.web import group -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import repository -from delphixpy.v1_8_0.web import snapshot -from delphixpy.v1_8_0.web import source -from delphixpy.v1_8_0.web.database import template -from delphixpy.v1_8_0.web.vo import VirtualSourceOperations -from delphixpy.v1_8_0.web.vo import OracleDatabaseContainer -from delphixpy.v1_8_0.web.vo import OracleInstance -from delphixpy.v1_8_0.web.vo import OracleProvisionParameters -from delphixpy.v1_8_0.web.vo import OracleSIConfig -from delphixpy.v1_8_0.web.vo import OracleVirtualSource -from delphixpy.v1_8_0.web.vo import TimeflowPointLocation -from delphixpy.v1_8_0.web.vo import TimeflowPointSemantic -from delphixpy.v1_8_0.web.vo import TimeflowPointTimestamp -from delphixpy.v1_8_0.web.vo import ASEDBContainer -from delphixpy.v1_8_0.web.vo import ASEInstanceConfig -from delphixpy.v1_8_0.web.vo import ASEProvisionParameters -from delphixpy.v1_8_0.web.vo import ASESIConfig -from delphixpy.v1_8_0.web.vo import ASEVirtualSource -from delphixpy.v1_8_0.web.vo import MSSqlProvisionParameters -from delphixpy.v1_8_0.web.vo import MSSqlDatabaseContainer -from delphixpy.v1_8_0.web.vo import MSSqlVirtualSource -from delphixpy.v1_8_0.web.vo import MSSqlSIConfig -from delphixpy.v1_8_0.web.vo import AppDataVirtualSource -from delphixpy.v1_8_0.web.vo import AppDataProvisionParameters -from delphixpy.v1_8_0.web.vo import AppDataDirectSourceConfig - -from lib.DxTimeflow import DxTimeflow -from lib.DlpxException import DlpxException -from lib.GetSession import GetSession -from lib.GetReferences import find_dbrepo -from lib.GetReferences import find_obj_by_name -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug - -def create_ase_vdb(engine, server, jobs, vdb_group, vdb_name, environment_obj, - container_obj): - ''' - Create a Sybase ASE VDB - ''' - vdb_obj = find_database_by_name_and_group_name(engine, server, - vdb_group.name, vdb_name) - if vdb_obj == None: - vdb_params = ASEProvisionParameters() - vdb_params.container = ASEDBContainer() - if arguments['--no_truncate_log']: - vdb_params.truncate_log_on_checkpoint = False - else: - vdb_params.truncate_log_on_checkpoint = True - vdb_params.container.group = vdb_group.reference - vdb_params.container.name = vdb_name - vdb_params.source = ASEVirtualSource() - vdb_params.source_config = ASESIConfig() - vdb_params.source_config.database_name = arguments['--db'] - vdb_params.source_config.instance = ASEInstanceConfig() - vdb_params.source_config.instance.host = environment_obj.host - - vdb_repo = find_dbrepo_by_environment_ref_and_name(engine, server, - "ASEInstance", - environment_obj.reference, - arguments['--envinst']) - - vdb_params.source_config.repository = vdb_repo.reference - vdb_params.timeflow_point_parameters = set_timeflow_point(engine, - server, - container_obj) - - vdb_params.timeflow_point_parameters.container = container_obj.reference - print_info("Provisioning " + vdb_name) - 
database.provision(server, vdb_params) - - #Add the job into the jobs dictionary so we can track its progress - jobs[engine["hostname"]] = server.last_job - #return the job object to the calling statement so that we can tell if - # a job was created or not (will return None, if no job) - return server.last_job - else: - print_info(engine["hostname"] + ": " + vdb_name + " already exists.") - return vdb_obj.reference - - -def create_mssql_vdb(engine, jobs, vdb_group, vdb_name, - environment_obj, container_obj): - ''' - Create a MSSQL VDB - engine: - jobs: - vdb_group: +import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import environment +from delphixpy.v1_10_2.web import group +from delphixpy.v1_10_2.web import repository +from delphixpy.v1_10_2.web import sourceconfig +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import dx_timeflow +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async + +VERSION = "v.0.3.007" + + +def create_ase_vdb( + dlpx_obj, + group_ref, vdb_name, - environment_obj: - container_obj: - - ''' - vdb_obj = find_database_by_name_and_group_name(engine, dx_session_obj.server_session, - vdb_group.name, vdb_name) - if vdb_obj == None: - vdb_params = MSSqlProvisionParameters() - vdb_params.container = MSSqlDatabaseContainer() - vdb_params.container.group = vdb_group.reference - vdb_params.container.name = vdb_name - vdb_params.source = MSSqlVirtualSource() - vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False - vdb_params.source_config = MSSqlSIConfig() - vdb_params.source_config.database_name = arguments['--db'] - - vdb_params.source_config.repository = find_dbrepo( - dx_session_obj.server_session, 'MSSqlInstance', environment_obj.reference, - arguments['--envinst']).reference - - vdb_params.timeflow_point_parameters = set_timeflow_point(engine, - dx_session_obj.server_session, - container_obj) - if not vdb_params.timeflow_point_parameters: - return - vdb_params.timeflow_point_parameters.container = \ - container_obj.reference - print_info(engine["hostname"] + ":Provisioning " + vdb_name) - database.provision(dx_session_obj.server_session, vdb_params) - #Add the job into the jobs dictionary so we can track its progress - jobs[engine["hostname"]] = dx_session_obj.server_session.last_job - #return the job object to the calling statement so that we can tell if - # a job was created or not (will return None, if no job) - return dx_session_obj.server_session.last_job - else: - print_info(engine["hostname"] + ": " + vdb_name + " already exists.") - return vdb_obj.reference - - -def create_vfiles_vdb(engine, jobs, vfiles_group, vfiles_name, - environment_obj, container_obj, pre_refresh=None, - post_refresh=None, pre_rollback=None, - post_rollback=None, configure_clone=None): - ''' - Create a Vfiles VDB - ''' - - vfiles_obj = None - + source_obj, + env_inst, + timestamp, + timestamp_type="SNAPSHOT", + no_truncate_log=False, +): + """ + Create a Sybase ASE VDB + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param group_ref: Reference of Group name where the VDB will be created + :type group_ref: str + :param vdb_name: Name of the VDB + :type vdb_name: str + :param source_obj: Database object of the source + :type source_obj: class + delphixpy.v1_10_2.web.objects.UnixHostEnvironment.UnixHostEnvironment + :param env_inst: 
Environment installation identifier in Delphix. + EX: "/u01/app/oracle/product/11.2.0/dbhome_1" + EX: ASETARGET + :type env_inst: str + :param timestamp: The Delphix semantic for the point in time on the + source from which to refresh the VDB + :type timestamp: str + :param timestamp_type: The Delphix semantic for the point in time on + the source from which you want to refresh your VDB either SNAPSHOT or TIME + :type timestamp_type: str + :param no_truncate_log: Don't truncate log on checkpoint + :type no_truncate_log: bool + :return: + """ + engine_name = list(dlpx_obj.dlpx_ddps)[0] + dx_timeflow_obj = dx_timeflow.DxTimeflow(dlpx_obj.server_session) try: - vfiles_obj = find_obj_by_name(dx_session_obj.server_session, - database, vfiles_name) - except DlpxException: + vdb_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, vdb_name + ) + raise dlpx_exceptions.DlpxObjectExists(f"{vdb_obj} exists.") + except dlpx_exceptions.DlpxObjectNotFound: pass - - if vfiles_obj is None: - vfiles_repo = find_repo_by_environment_ref(engine, - 'Unstructured Files', - environment_obj.reference) - - vfiles_params = AppDataProvisionParameters() - vfiles_params.source = AppDataVirtualSource() - vfiles_params.source_config = AppDataDirectSourceConfig() - - vdb_restart_reobj = re.compile('true', re.IGNORECASE) - - if vdb_restart_reobj.search(str(arguments['--vdb_restart'])): - vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = True - - elif vdb_restart_reobj.search(str(arguments['--vdb_restart'])) is None: - vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = False - - vfiles_params.container = { 'type': 'AppDataContainer', - 'group': vfiles_group.reference, - 'name': vfiles_name } - - vfiles_params.source_config.name = arguments['--target'] - vfiles_params.source_config.path = arguments['--vfiles_path'] - vfiles_params.source_config.environment_user = \ - environment_obj.primary_user - vfiles_params.source_config.repository = vfiles_repo.reference - - - vfiles_params.source.parameters = {} - vfiles_params.source.name = vfiles_name - vfiles_params.source.name = vfiles_name - vfiles_params.source.operations = VirtualSourceOperations() - - if pre_refresh: - vfiles_params.source.operations.pre_refresh = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': pre_refresh }] - - if post_refresh: - vfiles_params.source.operations.post_refresh = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': post_refresh }] - - if pre_rollback: - vfiles_params.source.operations.pre_rollback = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': pre_rollback }] - - if post_rollback: - vfiles_params.source.operations.post_rollback = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': post_rollback }] - - if configure_clone: - vfiles_params.source.operations.configure_clone = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': configure_clone }] - - if arguments['--timestamp_type'] is None: - vfiles_params.timeflow_point_parameters = { - 'type': 'TimeflowPointSemantic', - 'container': container_obj.reference, - 'location': 'LATEST_POINT'} - - elif arguments['--timestamp_type'].upper() == 'SNAPSHOT': - - try: - dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session) - dx_snap_params = dx_timeflow_obj.set_timeflow_point( - container_obj, - arguments['--timestamp_type'], - arguments['--timestamp'], - arguments['--timeflow']) - - except RequestError as e: - raise DlpxException('Could not set the timeflow point:\n%s' - % (e)) - - if dx_snap_params.type == 
'TimeflowPointSemantic': - vfiles_params.timeflow_point_parameters = {'type': - dx_snap_params.type, - 'container': - dx_snap_params.container, - 'location': - dx_snap_params.location} - - elif dx_snap_params.type == 'TimeflowPointTimestamp': - vfiles_params.timeflow_point_parameters = {'type': - dx_snap_params.type, - 'timeflow': - dx_snap_params.timeflow, - 'timestamp': - dx_snap_params.timestamp} - - print_info('%s: Provisioning %s\n' % (engine["hostname"], - vfiles_name)) - - try: - database.provision(dx_session_obj.server_session, vfiles_params) - - except (JobError, RequestError, HttpError) as e: - raise DlpxException('\nERROR: Could not provision the database:' - '\n%s' % (e)) - - - #Add the job into the jobs dictionary so we can track its progress - jobs[engine['hostname']] = dx_session_obj.server_session.last_job - - #return the job object to the calling statement so that we can tell if - # a job was created or not (will return None, if no job) - return dx_session_obj.server_session.last_job + vdb_params = vo.ASEProvisionParameters() + vdb_params.container = vo.ASEDBContainer() + if no_truncate_log: + vdb_params.truncate_log_on_checkpoint = False else: - print_info('\nERROR %s: %s already exists. \n' % (engine['hostname'], - vfiles_name)) - return vfiles_obj.reference - - -def create_oracle_si_vdb(engine, jobs, vdb_name, vdb_group_obj, - environment_obj, container_obj, pre_refresh=None, - post_refresh=None, pre_rollback=None, - post_rollback=None, configure_clone=None): - - ''' - Create an Oracle SI VDB - ''' - - vdb_obj = None - + vdb_params.truncate_log_on_checkpoint = True + vdb_params.container.group = group_ref + vdb_params.container.name = vdb_name + vdb_params.source = vo.ASEVirtualSource() + vdb_params.source.allow_auto_vdb_restart_on_host_reboot = True + vdb_params.source_config = vo.ASESIConfig() + vdb_params.source_config.database_name = vdb_name + vdb_params.source_config.repository = get_references.find_obj_by_name( + dlpx_obj.server_session, repository, env_inst + ).reference + vdb_params.timeflow_point_parameters = dx_timeflow_obj.set_timeflow_point( + source_obj, timestamp_type, timestamp + ) + vdb_params.timeflow_point_parameters.container = source_obj.reference + dx_logging.print_info(f"{engine_name} provisioning {vdb_name}") + database.provision(dlpx_obj.server_session, vdb_params) + # Add the job into the jobs dictionary so we can track its progress + dlpx_obj.jobs[dlpx_obj.server_session.address] = dlpx_obj.server_session.last_job + + +def create_mssql_vdb( + dlpx_obj, + group_ref, + vdb_name, + environment_obj, + source_obj, + env_inst, + timestamp, + timestamp_type="SNAPSHOT", +): + """ + Create a MSSQL VDB + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param group_ref: Reference of Group name where the VDB will be created + :type group_ref: str + :param vdb_name: Name of the VDB + :type vdb_name: str + :param environment_obj: Environment object where the VDB will be created + :type environment_obj: class 'delphixpy.v1_10_2.web.objects + :param source_obj: Database object of the source + :type source_obj: + :param env_inst: Environment installation identifier in Delphix. 
+ EX: "/u01/app/oracle/product/11.2.0/dbhome_1" + EX: ASETARGET + :type env_inst: str + :param timestamp: The Delphix semantic for the point in time on the + source from which to refresh the VDB + :type timestamp: str + :param timestamp_type: The Delphix semantic for the point in time on + the source from which you want to refresh your VDB either SNAPSHOT or TIME + :type timestamp_type: str + :return: + """ + engine_name = list(dlpx_obj.dlpx_ddps)[0] + timeflow_obj = dx_timeflow.DxTimeflow(dlpx_obj.server_session) try: - vdb_obj = find_obj_by_name(dx_session_obj.server_session, database, - vdb_name) - except DlpxException: + vdb_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, vdb_name + ) + raise dlpx_exceptions.DlpxObjectExists(f"{vdb_obj} exists.") + except dlpx_exceptions.DlpxObjectNotFound: pass - - if vdb_obj == None: - vdb_params = OracleProvisionParameters() - vdb_params.open_resetlogs = True - - if arguments['--noopen']: - vdb_params.open_resetlogs = False - - vdb_params.container = OracleDatabaseContainer() - vdb_params.container.group = vdb_group_obj.reference - vdb_params.container.name = vdb_name - vdb_params.source = OracleVirtualSource() - vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False - - if arguments['--instname']: - inst_name = arguments['--instname'] - elif arguments['--instname'] == None: - inst_name = vdb_name - - if arguments['--uniqname']: - unique_name = arguments['--uniqname'] - elif arguments['--uniqname'] == None: - unique_name = vdb_name - - if arguments['--db']: - db = arguments['--db'] - elif arguments['--db'] == None: - db = vdb_name - - vdb_params.source.mount_base = arguments['--mntpoint'] - - if arguments['--mapfile']: - vdb_params.source.file_mapping_rules = arguments['--mapfile'] - - if arguments['--template']: - template_obj = find_obj_by_name(dx_session_obj.server_session, - database.template, - arguments['--template']) - - vdb_params.source.config_template = template_obj.reference - - vdb_params.source_config = OracleSIConfig() - vdb_params.source.operations = VirtualSourceOperations() - - if pre_refresh: - vdb_params.source.operations.pre_refresh = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': pre_refresh }] - - if post_refresh: - vdb_params.source.operations.post_refresh = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': post_refresh }] - - if pre_rollback: - vdb_params.source.operations.pre_rollback = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': pre_rollback }] - - if post_rollback: - vdb_params.source.operations.post_rollback = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': post_rollback }] - - if configure_clone: - vdb_params.source.operations.configure_clone = [{ 'type': - 'RunCommandOnSourceOperation', - 'command': configure_clone }] - - vdb_repo = find_dbrepo_by_environment_ref_and_install_path(engine, - dx_session_obj.server_session, - 'OracleInstall', - environment_obj.reference, - arguments['--envinst']) - - vdb_params.source_config.database_name = db - vdb_params.source_config.unique_name = unique_name - vdb_params.source_config.instance = OracleInstance() - vdb_params.source_config.instance.instance_name = inst_name - vdb_params.source_config.instance.instance_number = 1 - vdb_params.source_config.repository = vdb_repo.reference - - dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session) - vdb_params.timeflow_point_parameters = \ - dx_timeflow_obj.set_timeflow_point(container_obj, - arguments['--timestamp_type'], - arguments['--timestamp']) - - print 
vdb_params, '\n\n\n' - print_info(engine["hostname"] + ": Provisioning " + vdb_name) - database.provision(dx_session_obj.server_session, vdb_params) - #Add the job into the jobs dictionary so we can track its progress - - jobs[engine['hostname']] = dx_session_obj.server_session.last_job - #return the job object to the calling statement so that we can tell if - # a job was created or not (will return None, if no job) - - return dx_session_obj.server_session.last_job - - else: - raise DlpxException('\nERROR: %s: %s alread exists\n' % - (engine['hostname'], vdb_name)) - - -def find_all_databases_by_group_name(engine, server, group_name, - exclude_js_container=False): + vdb_params = vo.MSSqlProvisionParameters() + vdb_params.container = vo.MSSqlDatabaseContainer() + vdb_params.container.group = group_ref + vdb_params.container.name = vdb_name + vdb_params.source = vo.MSSqlVirtualSource() + vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False + vdb_params.source_config = vo.MSSqlSIConfig() + vdb_params.source_config.database_name = vdb_name + vdb_params.source_config.environment_user = environment_obj.primary_user + vdb_params.source_config.repository = get_references.find_obj_by_name( + dlpx_obj.server_session, repository, env_inst + ).reference + vdb_params.timeflow_point_parameters = timeflow_obj.set_timeflow_point( + source_obj, timestamp_type, timestamp + ) + vdb_params.timeflow_point_parameters.container = source_obj.reference + dx_logging.print_info(f"{engine_name} provisioning {vdb_name}") + database.provision(dlpx_obj.server_session, vdb_params) + # Add the job into the jobs dictionary so we can track its progress + dlpx_obj.jobs[dlpx_obj.server_session.address] = dlpx_obj.server_session.last_job + + +def create_vfiles_vdb( + dlpx_obj, + group_ref, + vfiles_name, + environment_obj, + source_obj, + mntpoint, + timestamp, + timestamp_type="SNAPSHOT", + pre_refresh=None, + post_refresh=None, + pre_rollback=None, + post_rollback=None, + configure_clone=None, +): """ - Easy way to quickly find databases by group name + Create a vfiles VDB + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param group_ref: Reference of group name where the VDB will be created + :type group_ref: str + :param vfiles_name: Name of the vfiles VDB + :type vfiles_name: str + :param environment_obj: Environment object where the VDB will be created + :type environment_obj: class 'delphixpy.v1_10_2.web.objects + :param source_obj: vfiles object of the source + :type source_obj: class + delphixpy.v1_10_2.web.objects.OracleDatabaseContainer.OracleDatabaseContainer + :param mntpoint: Where to mount the Delphix filesystem + :type mntpoint: str + :param timestamp: The Delphix semantic for the point in time on the + source from which to refresh the VDB + :type timestamp: str + :param timestamp_type: The Delphix semantic for the point in time on + the source from which you want to refresh your VDB either SNAPSHOT or TIME + :type timestamp_type: str + :param pre_refresh: Pre-Hook commands before a refresh + :type pre_refresh: str + :param post_refresh: Post-Hook commands after a refresh + :type post_refresh: str + :param pre_rollback: Commands before a rollback + :type pre_rollback: str + :param post_rollback: Commands after a rollback + :type post_rollback: str + :param configure_clone: Configure clone commands + :type configure_clone: str """ - - #First search groups for the name specified and return its reference - group_obj = 
find_obj_by_name(dx_session_obj.server_session, group, - group_name) - if group_obj: - databases=database.get_all(server, group=group_obj.reference, - no_js_container_data_source=exclude_js_container) - return databases - - -def find_database_by_name_and_group_name(engine, server, group_name, - database_name): - - databases = find_all_databases_by_group_name(engine, server, group_name) - - for each in databases: - if each.name == database_name: - print_debug('%s: Found a match %s' % (engine['hostname'], - str(each.reference))) - return each - - print_info('%s unable to find %s in %s' % (engine['hostname'], - database_name, group_name)) - - -def find_dbrepo_by_environment_ref_and_install_path(engine, server, - install_type, - f_environment_ref, - f_install_path): - ''' - Function to find database repository objects by environment reference and - install path, and return the object's reference as a string - You might use this function to find Oracle and PostGreSQL database repos. - ''' - print_debug('%s: Searching objects in the %s class for one with the ' - 'environment reference of %s and an install path of %s' % - (engine['hostname'], install_type, f_environment_ref, - f_install_path), debug) - - for obj in repository.get_all(server, environment=f_environment_ref): - if install_type == 'PgSQLInstall': - if (obj.type == install_type and - obj.installation_path == f_install_path): - print_debug('%s: Found a match %s' % (engine['hostname'], - str(obj.reference)), debug) - return obj - - elif install_type == 'OracleInstall': - if (obj.type == install_type and - obj.installation_home == f_install_path): - - print_debug('%s: Fount a match %s' % (engine['hostname'], - str(obj.reference)), debug) - return obj - else: - raise DlpxException('%s: No Repo match found for type %s.\n' % - (engine["hostname"], install_type)) - - -def find_repo_by_environment_ref(engine, repo_type, f_environment_ref, - f_install_path=None): - ''' - Function to find unstructured file repository objects by environment - reference and name, and return the object's reference as a string - You might use this function to find Unstructured File repos. - ''' - - print_debug('\n%s: Searching objects in the %s class for one with the' - 'environment reference of %s\n' % - (engine['hostname'], repo_type, f_environment_ref), debug) - - obj_ref = '' - all_objs = repository.get_all(dx_session_obj.server_session, - environment=f_environment_ref) - - for obj in all_objs: - if obj.name == repo_type: - print_debug(engine['hostname'] + ': Found a match ' + - str(obj.reference)) - return obj - - elif obj.type == repo_type: - print_debug('%s Found a match %s' % (engine['hostname'], - str(obj.reference)), debug) - return obj - - raise DlpxException('%s: No Repo match found for type %s\n' % ( - engine['hostname'], repo_type)) - - -def find_dbrepo_by_environment_ref_and_name(engine, repo_type, - f_environment_ref, f_name): - ''' - Function to find database repository objects by environment reference and - name, and return the object's reference as a string - You might use this function to find MSSQL database repos. - ''' - - print_debug('%s: Searching objects in the %s class for one with the ' - 'environment reference of %s and a name of %s.' 
% - (engine['hostname'], repo_type, f_environment_ref, f_name), - debug) - - obj_ref = '' - all_objs = repository.get_all(server, environment=f_environment_ref) - - for obj in all_objs: - if (repo_type == 'MSSqlInstance' or repo_type == 'ASEInstance'): - if (obj.type == repo_type and obj.name == f_name): - print_debug('%s: Found a match %s' % (engine['hostname'], - str(obj.reference)), debug) - return obj - - elif repo_type == 'Unstructured Files': - if obj.value == install_type: - print_debug('%s: Found a match %s' % (engine['hostname'], - str(obj.reference)), debug) - return obj - - raise DlpxException('%s: No Repo match found for type %s\n' % - (engine['hostname'], repo_type)) - - -def find_snapshot_by_database_and_name(engine, database_obj, snap_name): + engine_name = list(dlpx_obj.dlpx_ddps)[0] + timeflow_obj = dx_timeflow.DxTimeflow(dlpx_obj.server_session) + try: + vdb_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, vfiles_name + ) + raise dlpx_exceptions.DlpxObjectExists(f"{vdb_obj} exists.") + except dlpx_exceptions.DlpxObjectNotFound: + pass + vfiles_params = vo.AppDataProvisionParameters() + vfiles_params.source = vo.AppDataVirtualSource() + vfiles_params.source_config = vo.AppDataDirectSourceConfig() + vfiles_params.source.allow_auto_vdb_restart_on_host_reboot = True + vfiles_params.container = vo.AppDataContainer() + vfiles_params.container.group = group_ref + vfiles_params.container.name = vfiles_name + vfiles_params.source_config.name = vfiles_name + vfiles_params.source_config.path = f"{mntpoint}/{vfiles_name}" + vfiles_params.source_config.environment_user = environment_obj.primary_user + vfiles_params.source_config.repository = get_references.find_db_repo( + dlpx_obj.server_session, + "AppDataRepository", + environment_obj.reference, + "Unstructured Files", + ) + vfiles_params.source.name = vfiles_name + vfiles_params.source.parameters = {} + vfiles_params.source.operations = vo.VirtualSourceOperations() + if pre_refresh: + vfiles_params.source.operations.pre_refresh = vo.RunCommandOnSourceOperation() + vfiles_params.source.operations.pre_refresh.command = pre_refresh + if post_refresh: + vfiles_params.source.operations.post_refresh = vo.RunCommandOnSourceOperation() + vfiles_params.source.operations.post_refresh.command = post_refresh + if pre_rollback: + vfiles_params.source.operations.pre_rollback = vo.RunCommandOnSourceOperation + vfiles_params.source.operations.pre_rollback.command = pre_rollback + if post_rollback: + vfiles_params.source.operations.post_rollback = vo.RunCommandOnSourceOperation() + vfiles_params.source.operations.post_rollback.command = post_rollback + if configure_clone: + vfiles_params.source.operations.configure_clone = ( + vo.RunCommandOnSourceOperation() + ) + vfiles_params.source.operations.configure_clone.command = configure_clone + if timestamp_type is None: + vfiles_params.timeflow_point_parameters = vo.TimeflowPointSemantic() + vfiles_params.timeflow_point_parameters.container = source_obj.reference + vfiles_params.timeflow_point_parameters.location = "LATEST_POINT" + elif timestamp_type.upper() == "SNAPSHOT": + try: + dx_snap_params = timeflow_obj.set_timeflow_point( + source_obj, timestamp_type, timestamp + ) + except exceptions.RequestError as err: + raise dlpx_exceptions.DlpxException( + f"Could not set the timeflow point:\n{err}" + ) + if dx_snap_params.type == "TimeflowPointSemantic": + vfiles_params.timeflow_point_parameters = vo.TimeflowPointSemantic() + vfiles_params.timeflow_point_parameters.container 
= dx_snap_params.container + vfiles_params.timeflow_point_parameters.location = dx_snap_params.location + elif dx_snap_params.type == "TimeflowPointTimestamp": + vfiles_params.timeflow_point_parameters = vo.TimeflowPointTimestamp() + vfiles_params.timeflow_point_parameters.timeflow = dx_snap_params.timeflow + vfiles_params.timeflow_point_parameters.timestamp = dx_snap_params.timestamp + dx_logging.print_info(f"{engine_name}: Provisioning {vfiles_name}\n") + try: + database.provision(dlpx_obj.server_session, vfiles_params) + except (exceptions.RequestError, exceptions.HttpError) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: Could not provision the database {vfiles_name}\n{err}" + ) + # Add the job into the jobs dictionary so we can track its progress + dlpx_obj.jobs[dlpx_obj.server_session.address] = dlpx_obj.server_session.last_job + + +def create_oracle_si_vdb( + dlpx_obj, + group_ref, + vdb_name, + environment_obj, + source_obj, + env_inst, + mntpoint, + timestamp, + timestamp_type="SNAPSHOT", + pre_refresh=None, + post_refresh=None, + pre_rollback=None, + post_rollback=None, + configure_clone=None, +): """ - Find snapshots by database and name. Return snapshot reference. - - engine: Dictionary of engines from config file. - database_obj: Database object to find the snapshot against - snap_name: Name of the snapshot + Create an Oracle SI VDB + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param group_ref: Group name where the VDB will be created + :type group_ref: str + :param vdb_name: Name of the VDB + :type vdb_name: str + :param source_obj: Database object of the source + :type source_obj: class + delphixpy.v1_10_2.web.objects.OracleDatabaseContainer.OracleDatabaseContainer + :param environment_obj: Environment object where the VDB will be created + :type environment_obj: class 'delphixpy.v1_10_2.web.objects + :param env_inst: Environment installation identifier in Delphix. + EX: "/u01/app/oracle/product/11.2.0/dbhome_1" + EX: ASETARGET + :type env_inst: str + :param mntpoint: Where to mount the Delphix filesystem + :type mntpoint: str + :param timestamp: The Delphix semantic for the point in time on the + source from which to refresh the VDB + :type timestamp: str + :param timestamp_type: The Delphix semantic for the point in time on + the source from which you want to refresh your VDB either SNAPSHOT or TIME + :type timestamp_type: str + :param pre_refresh: Pre-Hook commands before a refresh + :type pre_refresh: str + :param post_refresh: Post-Hook commands after a refresh + :type post_refresh: str + :param pre_rollback: Commands before a rollback + :type pre_rollback: str + :param post_rollback: Commands after a rollback + :type post_rollback: str + :param configure_clone: Configure clone commands + :type configure_clone: str """ - - snapshots = snapshot.get_all(dx_session_obj.server_session, - database=database_obj.reference) - matches = [] - for snapshot_obj in snapshots: - if str(snapshot_obj.name).startswith(arguments['--timestamp']): - matches.append(snapshot_obj) - - for each in matches: - print_debug(each.name, debug) - - if len(matches) == 1: - print_debug('%s: Found one and only one match. This is good.\n %s' % - (engine['hostname'], matches[0]), debug) - return matches[0] - - elif len(matches) > 1: - raise DlpxException('%s: The name specified was not specific enough.' 
-                            ' More than one match found.\n' %
-                            (engine['hostname'],))
-
-    else:
-        raise DlpxException('%s: No matches found for the time specified.\n'
-                            % (engine['hostname']))
-
-
-def find_snapshot_by_database_and_time(engine, database_obj, snap_time):
-    snapshots = snapshot.get_all(dx_session_obj.server_session,
-                                 database=database_obj.reference)
-    matches = []
-
-    for snapshot_obj in snapshots:
-        if str(snapshot_obj.latest_change_point.timestamp).startswith(arguments['--timestamp']):
-
-            matches.append(snapshot_obj)
-
-    if len(matches) == 1:
-        print_debug('%s": Found one and only one match. This is good.\n%s' %
-                    (engine['hostname'], matches[0]), debug)
-
-        return matches[0]
-
-    elif len(matches) > 1:
-        print_debug(matches, debug)
-
-        raise DlpxException('%s: The time specified was not specific enough.'
-                            'More than one match found.\n' %
-                            (engine['hostname']))
-    else:
-        raise DlpxException('%s: No matches found for the time specified.\n'
-                            % (engine['hostname']))
-
-
-def find_source_by_database(engine, database_obj):
-    #The source tells us if the database is enabled/disables, virtual,
-    # vdb/dSource, or is a staging database.
-    source_obj = source.get_all(server, database=database_obj.reference)
-
-    #We'll just do a little sanity check here to ensure we only have a 1:1
-    # result.
-    if len(source_obj) == 0:
-        raise DlpxException('%s: Did not find a source for %s. Exiting.\n' %
-                            (engine['hostname'], database_obj.name))
-
-    elif len(source_obj) > 1:
-        raise DlpxException('%s: More than one source returned for %s. '
-                            'Exiting.\n' % (engine['hostname'],
-                            database_obj.name + ". Exiting"))
-    return source_obj
-
-
-def run_async(func):
+    engine_name = list(dlpx_obj.dlpx_ddps)[0]
+    try:
+        vdb_obj = get_references.find_obj_by_name(
+            dlpx_obj.server_session, database, vdb_name
+        )
+        raise dlpx_exceptions.DlpxObjectExists(f"{vdb_obj} exists.")
+    except dlpx_exceptions.DlpxObjectNotFound:
+        pass
+    vdb_params = vo.OracleProvisionParameters()
+    vdb_params.open_resetlogs = True
+    vdb_params.container = vo.OracleDatabaseContainer()
+    vdb_params.container.group = group_ref
+    vdb_params.container.name = vdb_name
+    vdb_params.source = vo.OracleVirtualSource()
+    vdb_params.source.allow_auto_vdb_restart_on_host_reboot = False
+    vdb_params.source.mount_base = mntpoint
+    vdb_params.source_config = vo.OracleSIConfig()
+    vdb_params.source_config.environment_user = environment_obj.primary_user
+    vdb_params.source.operations = vo.VirtualSourceOperations()
+    if pre_refresh:
+        vdb_params.source.operations.pre_refresh = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.pre_refresh.command = pre_refresh
+    if post_refresh:
+        vdb_params.source.operations.post_refresh = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.post_refresh.command = post_refresh
+    if pre_rollback:
+        vdb_params.source.operations.pre_rollback = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.pre_rollback.command = pre_rollback
+    if post_rollback:
+        vdb_params.source.operations.post_rollback = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.post_rollback.command = post_rollback
+    if configure_clone:
+        vdb_params.source.operations.configure_clone = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.configure_clone.command = configure_clone
+    vdb_params.source_config.database_name = vdb_name
+    vdb_params.source_config.unique_name = vdb_name
+    vdb_params.source_config.instance = vo.OracleInstance()
+    vdb_params.source_config.instance.instance_name = vdb_name
+
vdb_params.source_config.instance.instance_number = 1 + vdb_params.source_config.repository = get_references.find_db_repo( + dlpx_obj.server_session, "OracleInstall", environment_obj.reference, env_inst + ) + timeflow_obj = dx_timeflow.DxTimeflow(dlpx_obj.server_session) + vdb_params.timeflow_point_parameters = timeflow_obj.set_timeflow_point( + source_obj, timestamp_type, timestamp + ) + dx_logging.print_info(f"{engine_name}: Provisioning {vdb_name}") + try: + database.provision(dlpx_obj.server_session, vdb_params) + except (exceptions.RequestError, exceptions.HttpError) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: Could not provision the database {vdb_name}\n{err}" + ) + # Add the job into the jobs dictionary so we can track its progress + dlpx_obj.jobs[dlpx_obj.server_session.address] = dlpx_obj.server_session.last_job + + +def create_oracle_mt_vdb( + dlpx_obj, + group_ref, + vdb_name, + source_obj, + mntpoint, + timestamp, + timestamp_type="SNAPSHOT", + pre_refresh=None, + post_refresh=None, + pre_rollback=None, + post_rollback=None, + configure_clone=None, +): """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() + Create an Oracle Multi Tenant VDB + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param group_ref: Group name where the VDB will be created + :type group_ref: str + :param vdb_name: Name of the VDB + :type vdb_name: str + delphixpy.v1_10_2.web.objects.OracleDatabaseContainer.OracleDatabaseContainer + :param mntpoint: Where to mount the Delphix filesystem + :type mntpoint: str + :param timestamp: The Delphix semantic for the point in time on the + source from which to refresh the VDB + :type timestamp: str + :param timestamp_type: The Delphix semantic for the point in time on + the source from which you want to refresh your VDB either SNAPSHOT or TIME + :type timestamp_type: str + :param pre_refresh: Pre-Hook commands before a refresh + :type pre_refresh: str + :param post_refresh: Post-Hook commands after a refresh + :type post_refresh: str + :param pre_rollback: Commands before a rollback + :type pre_rollback: str + :param post_rollback: Commands after a rollback + :type post_rollback: str + :param configure_clone: Configure clone commands + :type configure_clone: str """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func + engine_name = list(dlpx_obj.dlpx_ddps)[0] + cdb_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, sourceconfig, ARGUMENTS["--source"] + ) + try: + vdb_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, vdb_name + ) + raise dlpx_exceptions.DlpxObjectExists(f"{vdb_obj} exists.") + except dlpx_exceptions.DlpxObjectNotFound: + pass + vdb_params = vo.OracleMultitenantProvisionParameters() + vdb_params.open_resetlogs = True + vdb_params.container = vo.OracleDatabaseContainer() + vdb_params.container.group = group_ref + vdb_params.container.name = vdb_name + vdb_params.source = vo.OracleVirtualPdbSource() + vdb_params.source.allow_auto_vdb_restart_on_host_reboot = 
True
+    vdb_params.source.mount_base = mntpoint
+    vdb_params.source_config = vo.OraclePDBConfig()
+    vdb_params.source_config.database_name = vdb_name
+    vdb_params.source_config.cdb_config = cdb_obj.cdb_config
+    vdb_params.source.operations = vo.VirtualSourceOperations()
+    if pre_refresh:
+        vdb_params.source.operations.pre_refresh = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.pre_refresh.command = pre_refresh
+    if post_refresh:
+        vdb_params.source.operations.post_refresh = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.post_refresh.command = post_refresh
+    if pre_rollback:
+        vdb_params.source.operations.pre_rollback = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.pre_rollback.command = pre_rollback
+    if post_rollback:
+        vdb_params.source.operations.post_rollback = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.post_rollback.command = post_rollback
+    if configure_clone:
+        vdb_params.source.operations.configure_clone = vo.RunCommandOnSourceOperation()
+        vdb_params.source.operations.configure_clone.command = configure_clone
+    timeflow_obj = dx_timeflow.DxTimeflow(dlpx_obj.server_session)
+    vdb_params.timeflow_point_parameters = timeflow_obj.set_timeflow_point(
+        source_obj, timestamp_type, timestamp
+    )
+    dx_logging.print_info(f"{engine_name}: Provisioning {vdb_name}")
+    try:
+        database.provision(dlpx_obj.server_session, vdb_params)
+    except (exceptions.RequestError, exceptions.HttpError) as err:
+        raise dlpx_exceptions.DlpxException(
+            f"ERROR: Could not provision the database {vdb_name}\n{err}"
+        )
+    # Add the job into the jobs dictionary so we can track its progress
+    dlpx_obj.jobs[dlpx_obj.server_session.address] = dlpx_obj.server_session.last_job


 @run_async
-def main_workflow(engine):
+def main_workflow(engine, dlpx_obj, single_thread):
     """
-    This function actually runs the jobs.
+    This function is where we create our main workflow.
     Use the @run_async decorator to run this function asynchronously.
-    This allows us to run against multiple Delphix Engine simultaneously
-
-    engine: Dictionary containing engine information
+    The @run_async decorator allows us to run against multiple Delphix Engines
+    simultaneously
+    :param engine: Dictionary of engines
+    :type engine: dictionary
+    :param dlpx_obj: DDP session object
+    :type dlpx_obj: lib.GetSession.GetSession object
+    :param single_thread: True - run single threaded, False - run multi-thread
+    :type single_thread: bool
     """
-
-    #Establish these variables as empty for use later
-    environment_obj = None
-    source_objs = None
-    jobs = {}
-
     try:
-        #Setup the connection to the Delphix Engine
-        dx_session_obj.serversess(engine['ip_address'], engine['username'],
-                                  engine['password'])
-
-        group_obj = find_obj_by_name(dx_session_obj.server_session, group,
-                                     arguments['--target_grp'])
-
-        #Get the reference of the target environment.
- print_debug('Getting environment for %s\n' % (host_name), debug) - - #Get the environment object by the hostname - environment_obj = find_obj_by_name(dx_session_obj.server_session, - environment, host_name) - - except DlpxException as e: - print('\nERROR: Engine %s encountered an error while provisioning ' - '%s:\n%s\n' % (engine['hostname'], arguments['--target'], e)) - sys.exit(1) - - print_debug('Getting database information for %s\n' % - (arguments['--source']), debug) + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: dx_provision_vdb encountered an error authenticating to " + f' {engine["ip_address"]} :\n{err}' + ) + group_ref = get_references.find_obj_by_name( + dlpx_obj.server_session, group, ARGUMENTS["--target_grp"] + ).reference + environment_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, environment, ARGUMENTS["--env_name"] + ) + source_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, ARGUMENTS["--source"] + ) + thingstodo = ["thingstodo"] try: - #Get the database reference we are copying from the database name - database_obj = find_obj_by_name(dx_session_obj.server_session, - database, arguments['--source']) - except DlpxException: - return - - thingstodo = ["thingtodo"] - #reset the running job count before we begin - i = 0 - - try: - with dx_session_obj.job_mode(single_thread): - while (len(jobs) > 0 or len(thingstodo) > 0): - arg_type = arguments['--type'].lower() - if len(thingstodo)> 0: - + with dlpx_obj.job_mode(single_thread): + while dlpx_obj.jobs or thingstodo: + if thingstodo: + arg_type = ARGUMENTS["--type"].lower() if arg_type == "oracle": - create_oracle_si_vdb(engine, jobs, database_name, - group_obj, environment_obj, - database_obj, - arguments['--prerefresh'], - arguments['--postrefresh'], - arguments['--prerollback'], - arguments['--postrollback'], - arguments['--configure-clone']) - + create_oracle_si_vdb( + dlpx_obj, + group_ref, + ARGUMENTS["--db"], + environment_obj, + source_obj, + ARGUMENTS["--envinst"], + ARGUMENTS["--mntpoint"], + ARGUMENTS["--timestamp"], + ARGUMENTS["--timestamp_type"], + ARGUMENTS["--prerefresh"], + ARGUMENTS["--postrefresh"], + ARGUMENTS["--prerollback"], + ARGUMENTS["--postrollback"], + ARGUMENTS["--configure-clone"], + ) + elif arg_type == "oramt": + create_oracle_mt_vdb( + dlpx_obj, + group_ref, + ARGUMENTS["--db"], + source_obj, + ARGUMENTS["--mntpoint"], + ARGUMENTS["--timestamp"], + ARGUMENTS["--timestamp_type"], + ARGUMENTS["--prerefresh"], + ARGUMENTS["--postrefresh"], + ARGUMENTS["--prerollback"], + ARGUMENTS["--postrollback"], + ARGUMENTS["--configure-clone"], + ) elif arg_type == "ase": - create_ase_vdb(engine, server, jobs, group_obj, - database_name, environment_obj, - database_obj) - + create_ase_vdb( + dlpx_obj, + group_ref, + ARGUMENTS["--db"], + source_obj, + ARGUMENTS["--envinst"], + ARGUMENTS["--timestamp"], + ARGUMENTS["--timestamp_type"], + ARGUMENTS["--no_truncate_log"], + ) elif arg_type == "mssql": - create_mssql_vdb(engine, jobs, group_obj, - database_name, environment_obj, - database_obj) - + create_mssql_vdb( + dlpx_obj, + group_ref, + ARGUMENTS["--db"], + environment_obj, + source_obj, + ARGUMENTS["--envinst"], + ARGUMENTS["--timestamp"], + ARGUMENTS["--timestamp_type"], + ) elif arg_type == "vfiles": - create_vfiles_vdb(engine, jobs, group_obj, - 
database_name, environment_obj, - database_obj, - arguments['--prerefresh'], - arguments['--postrefresh'], - arguments['--prerollback'], - arguments['--postrollback'], - arguments['--configure-clone']) - + create_vfiles_vdb( + dlpx_obj, + group_ref, + ARGUMENTS["--db"], + environment_obj, + source_obj, + ARGUMENTS["--envinst"], + ARGUMENTS["--mntpoint"], + ARGUMENTS["--prerefresh"], + ARGUMENTS["--postrefresh"], + ARGUMENTS["--prerollback"], + ARGUMENTS["--postrollback"], + ARGUMENTS["--configure-clone"], + ) thingstodo.pop() - - #get all the jobs, then inspect them - i = 0 - for j in jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, jobs[j]) - print_debug(job_obj, debug) - print_info(engine["hostname"] + ": VDB Provision: " + - job_obj.job_state) - - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from - # the running jobs list. - del jobs[j] - else: - #If the job is in a running state, increment the - # running job count. - i += 1 - - print_info('%s: %s jobs running.' % (engine['hostname'], - str(i))) - - #If we have running jobs, pause before repeating the checks. - if len(jobs) > 0: - sleep(float(arguments['--poll'])) - - except (DlpxException, JobError) as e: - print '\nError while provisioning %s:\n%s' % (database_name, e.message) - sys.exit(1) - - -def run_job(): + run_job.find_job_state(engine, dlpx_obj) + except ( + dlpx_exceptions.DlpxException, + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Error in dx_provision_vdb: " f'{engine["ip_address"]}\n{err}' + ) + + +def main(): """ - This function runs the main_workflow aynchronously against all the servers - specified - - No arguments required for run_job(). + main function - creates session and runs jobs """ - #Create an empty list to store threads we create. - threads = [] - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in main_workflow:\n%s' % (e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: %s\n' % - (arguments['--engine'])) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine %s cannot be ' - 'found in %s. Please check your value ' - 'and try again. Exiting.\n' % ( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: %s' % ( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - - break - - - if engine == None: - raise DlpxException("\nERROR: No default engine found. 
Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def serversess(f_engine_address, f_engine_username, f_engine_password): - """ - Function to setup the session with the Delphix Engine - """ - server_session= DelphixEngine(f_engine_address, f_engine_username, - f_engine_password, "DOMAIN") - return server_session - - -def set_exit_handler(func): - """ - This function helps us set the correct exit code - """ - signal.signal(signal.SIGTERM, func) - - -def set_timeflow_point(engine, server, container_obj): - """ - This returns the reference of the timestamp specified. - """ - - if arguments['--timestamp_type'].upper() == "SNAPSHOT": - if arguments['--timestamp'].upper() == "LATEST": - print_debug('%s: Using the latest Snapshot.' % - (engine['hostname']), debug) - - timeflow_point_parameters = TimeflowPointSemantic() - timeflow_point_parameters.container = container_obj.reference - timeflow_point_parameters.location = "LATEST_SNAPSHOT" - - elif arguments['--timestamp'].startswith("@"): - print_debug('%s: Using a named snapshot' % (engine['hostname']), - debug) - - snapshot_obj = find_snapshot_by_database_and_name(engine, server, - container_obj, - arguments['--timestamp']) - - if snapshot_obj != None: - timeflow_point_parameters=TimeflowPointLocation() - timeflow_point_parameters.timeflow = snapshot_obj.timeflow - timeflow_point_parameters.location = \ - snapshot_obj.latest_change_point.location - - else: - raise DlpxException('%s: Was unable to use the specified ' - 'snapshot %s for database %s\n' % - (engine['hostname'], - arguments['--timestamp'], - container_obj.name)) - - else: - print_debug('%s: Using a time-designated snapshot' % - (engine['hostname']), debug) - - snapshot_obj = find_snapshot_by_database_and_time(engine, server, - container_obj, - arguments['--timestamp']) - if snapshot_obj != None: - timeflow_point_parameters=TimeflowPointTimestamp() - timeflow_point_parameters.timeflow = snapshot_obj.timeflow - timeflow_point_parameters.timestamp = \ - snapshot_obj.latest_change_point.timestamp - else: - raise DlpxException('%s: Was unable to find a suitable time ' - ' for %s for database %s.\n' % - (engine['hostname'], - arguments['--timestamp'], - container_obj.name)) - - elif arguments['--timestamp_type'].upper() == "TIME": - if arguments['--timestamp'].upper() == "LATEST": - timeflow_point_parameters = TimeflowPointSemantic() - timeflow_point_parameters.location = "LATEST_POINT" - else: - raise DlpxException('%s: Only support a --timestamp value of ' - '"latest" when used with timestamp_type ' - 'of time' %s (engine['hostname'])) - - else: - raise DlpxException('%s is not a valied timestamp_type. Exiting\n' % - (arguments['--timestamp_type'])) - - timeflow_point_parameters.container = container_obj.reference - return timeflow_point_parameters - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - -def update_jobs_dictionary(engine, server, jobs): - """ - This function checks each job in the dictionary and updates its status or - removes it if the job is complete. - Return the number of jobs still running. 
- """ - #Establish the running jobs counter, as we are about to update the count - # from the jobs report. - i = 0 - #get all the jobs, then inspect them - for j in jobs.keys(): - job_obj = job.get(server, jobs[j]) - print_debug('%s: %s' % (engine['hostname'], str(job_obj)), debug) - print_info('%s: %s: %s' % (engine['hostname'], j.name, - job_obj.job_state)) - - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from the running - # jobs list. - del jobs[j] - else: - #If the job is in a running state, increment the running job count. - i += 1 - return i - - -def main(argv): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global database_name - global host_name - global dx_session_obj - global debug - + time_start = time.time() + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] try: - dx_session_obj = GetSession() - debug = arguments['--debug'] - logging_est(arguments['--logdir'], debug) - print_debug(arguments, debug) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - - print_info('Welcome to %s version %s' % (basename(__file__), - VERSION)) - - #Parse the dxtools.conf and put it into a dictionary dx_session_obj.get_config(config_file_path) - - - database_name = arguments['--target'] - host_name = arguments['--environment'] - - #This is the function that will handle processing main_workflow for + # This is the function that will handle processing main_workflow for # all the servers. - run_job() - - elapsed_minutes = time_elapsed() - print_info('script took %s minutes to get this far. ' % - (str(elapsed_minutes))) - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except DlpxException as e: - """ - We use this exception handler when an error occurs in a function call. - """ + for each in run_job.run_job( + main_workflow, dx_session_obj, engine, single_thread + ): + # join them back together so that we wait for all threads to + # complete + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"script took {elapsed_minutes} minutes to " f"get this far." + ) + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call. + dx_logging.print_exception( + f"ERROR: Please check the ERROR message " f"below:\n {err.error}" + ) + sys.exit(2) - print('\nERROR: Please check the ERROR message below:\n%s' % - (e.message)) + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) sys.exit(2) - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print('\nERROR: Connection failed to the Delphix Engine. 
Please ' - 'check the ERROR message below:\n%s' % (e.message)) + except KeyError as err: + dx_logging.print_exception(f"ERROR: Key not found:\n{err}") sys.exit(2) - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so - that we have actionable data - """ - print 'A job failed in the Delphix Engine:\n%s' (e.job) - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far' % (basename(__file__), - str(elapsed_minutes))) + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." + f"{basename(__file__)} took {elapsed_minutes} minutes to get " + f"this far" + ) sys.exit(3) except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far' % (basename(__file__), - str(elapsed_minutes))) + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to get this far." + ) - except: - """ - Everything else gets caught here - """ - print(sys.exc_info()[0]) - print(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far' % (basename(__file__), - str(elapsed_minutes))) - sys.exit(1) if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! + main() diff --git a/dx_refresh_db.py b/dx_refresh_db.py deleted file mode 100755 index bef3bbc..0000000 --- a/dx_refresh_db.py +++ /dev/null @@ -1,907 +0,0 @@ -#!/usr/bin/env python -#DEPRECATED -#Adam Bowen - Apr 2016 -#This script refreshes a vdb -# Updated by Corey Brune Oct 2016 -#requirements -#pip install --upgrade setuptools pip docopt delphixpy - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. This thing is brilliant. -"""Refresh a vdb -Usage: - dx_refresh_db.py (--name | --dsource | --all_vdbs [--group_name ]| --host | --list_timeflows | --list_snapshots) - [--timestamp_type ] - [--timestamp --timeflow ] - [-d | --engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_refresh_db.py -h | --help | -v | --version -Refresh a Delphix VDB -Examples: - dx_refresh_db.py --name "aseTest" --group_name "Analytics" - dx_refresh_db.py --dsource "dlpxdb1" - dx_refresh_db.py --all_vdbs --host LINUXSOURCE --parallel 4 --debug -d landsharkengine - dx_refresh_db.py --all_vdbs --group_name "Analytics" --all -Options: - --name Name of the object you are refreshing. - --all_vdbs Refresh all VDBs that meet the filter criteria. - --dsource Name of dsource in Delphix to execute against. - --group_name Name of the group to execute against. 
- --list_timeflows List all timeflows - --list_snapshots List all snapshots - --host Name of environment in Delphix to execute against. - --timestamp_type The type of timestamp you are specifying. - Acceptable Values: TIME, SNAPSHOT - [default: SNAPSHOT] - --timestamp - The Delphix semantic for the point in time on - the source from which you want to refresh your VDB. - Formats: - latest point in time or snapshot: LATEST - point in time: "YYYY-MM-DD HH24:MI:SS" - snapshot name: "@YYYY-MM-DDTHH24:MI:SS.ZZZ" - snapshot time from GUI: "YYYY-MM-DD HH24:MI" - [default: LATEST] - --timeflow Name of the timeflow to refresh a VDB - -d Identifier of Delphix engine in dxtools.conf. - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_refresh_db.log] - -h --help Show this screen. - -v --version Show version. -""" -VERSION = 'v.0.1.615' - - -from docopt import docopt -import logging -from os.path import basename -import sys -import traceback -import json -from time import sleep, time - -from delphixpy.v1_8_0.delphix_engine import DelphixEngine -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0 import job_context -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import environment -from delphixpy.v1_8_0.web import group -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import source -from delphixpy.v1_8_0.web import timeflow -from delphixpy.v1_8_0.web.snapshot import snapshot -from delphixpy.v1_8_0.web.vo import OracleRefreshParameters -from delphixpy.v1_8_0.web.vo import RefreshParameters -from delphixpy.v1_8_0.web.vo import TimeflowPointLocation -from delphixpy.v1_8_0.web.vo import TimeflowPointSemantic -from delphixpy.v1_8_0.web.vo import TimeflowPointTimestamp - -from lib.DlpxException import DlpxException -from lib.GetSession import GetSession -from lib.GetReferences import find_obj_by_name -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug -from lib.DxLogging import print_exception - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... 
- t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -def find_all_databases_by_dsource_name(engine, server, dsource_name, - exclude_js_container=True): - """ - Easy way to quickly find databases by dSource - """ - - #First search for the dSource name specified and return its reference - dsource_obj = find_obj_by_name(engine, server, database, dsource_name) - - if dsource_obj: - return(database.get_all(server, - provision_container=dsource_obj.reference, - no_js_container_data_source=exclude_js_container)) - - -def find_all_databases_by_group_name(engine, server, group_name, - exclude_js_container=True): - """ - Easy way to quickly find databases by group name - """ - - #First search groups for the name specified and return its reference - group_obj = find_obj_by_name(engine, server, group, group_name) - if group_obj: - return(database.get_all(server, group=group_obj.reference, - no_js_container_data_source=exclude_js_container)) - - -def find_database_by_name_and_group_name(engine, server, group_name, - database_name): - - databases = find_all_databases_by_group_name(engine, server, group_name) - - for each in databases: - if each.name == database_name: - print_debug(engine["hostname"] + ": Found a match " + - str(each.reference)) - return each - - print_info(engine["hostname"] + ": Unable to find \"" + - database_name + "\" in " + group_name) - - -def find_snapshot_by_database_and_name(engine, server, database_obj, snap_name): - snapshots = snapshot.get_all(server, database=database_obj.reference) - matches = [] - for snapshot_obj in snapshots: - if str(snapshot_obj.name).startswith(arguments['--timestamp']): - matches.append(snapshot_obj) - - if len(matches) == 1: - - print_debug(engine["hostname"] + - ": Found one and only one match. This is good.") - print_debug(engine["hostname"] + ": " + matches[0]) - - return matches[0] - - elif len(matches) > 1: - print_error("The name specified was not specific enough. " - "More than one match found.") - - for each in matches: - print_debug(engine["hostname"] + ": " + each.name) - else: - print_error("No matches found for the time specified") - print_error("No matching snapshot found") - - -def find_snapshot_by_database_and_time(engine, server, database_obj, snap_time): - """ - Find snapshot object by database name and timetamp - engine: - server: A Delphix engine object. - database_obj: The database reference to retrieve the snapshot - snap_time: timstamp of the snapshot - """ - snapshots = snapshot.get_all(server, database=database_obj.reference) - matches = [] - - for snapshot_obj in snapshots: - if str(snapshot_obj.latest_change_point.timestamp) == snap_time \ - or str(snapshot_obj.first_change_point.timestamp) == snap_time: - - matches.append(snapshot_obj) - - if len(matches) == 1: - snap_match = get_obj_name(server, database, matches[0].container) - print_debug(engine['hostname'] + - ': Found one and only one match. This is good.') - print_debug(engine['hostname'] + ': ' + snap_match) - - - return matches[0] - - elif len(matches) > 1: - print_debug(engine["hostname"] + ": " + matches) - raise DlpxException('The time specified was not specific enough.' 
- ' More than one match found.\n') - else: - raise DlpxException('No matches found for the time specified.\n') - - -def find_source_by_database(engine, server, database_obj): - #The source tells us if the database is enabled/disables, virtual, - # vdb/dSource, or is a staging database. - source_obj = source.get_all(server, database=database_obj.reference) - - #We'll just do a little sanity check here to ensure we only have a - # 1:1 result. - if len(source_obj) == 0: - print_error(engine["hostname"] + ": Did not find a source for " + - database_obj.name + ". Exiting") - sys.exit(1) - - elif len(source_obj) > 1: - print_error(engine["hostname"] + - ": More than one source returned for " + - database_obj.name + ". Exiting") - print_error(source_obj) - sys.exit(1) - - return source_obj - - -def get_config(config_file_path): - """ - This function reads in the dxtools.conf file - """ - #First test to see that the file is there and we can open it - try: - config_file = open(config_file_path).read() - except: - print_error("Was unable to open " + config_file_path + - ". Please check the path and permissions, then try again.") - sys.exit(1) - - #Now parse the file contents as json and turn them into a - # python dictionary, throw an error if it isn't proper json - try: - config = json.loads(config_file) - except: - print_error("Was unable to read " + config_file_path + - " as json. Please check file in a json formatter and " \ - "try again.") - sys.exit(1) - - #Create a dictionary of engines (removing the data node from the - # dxtools.json, for easier parsing) - delphix_engines = {} - for each in config['data']: - delphix_engines[each['hostname']] = each - - print_debug(delphix_engines) - return delphix_engines - - -def job_mode(server): - """ - This function tells Delphix how to execute jobs, based on the - single_thread variable at the beginning of the file - """ - #Synchronously (one at a time) - if single_thread == True: - job_m = job_context.sync(server) - print_debug("These jobs will be executed synchronously") - #Or asynchronously - else: - job_m = job_context.async(server) - print_debug("These jobs will be executed asynchronously") - return job_m - - -def job_wait(): - """ - This job stops all work in the thread/process until all jobs on the - engine are completed. - """ - #Grab all the jos on the server (the last 25, be default) - all_jobs = job.get_all(server) - #For each job in the list, check to see if it is running (not ended) - for jobobj in all_jobs: - if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): - print_debug("Waiting for " + jobobj.reference + " (currently: " + - jobobj.job_state + - ") to finish running against the container") - - #If so, wait - job_context.wait(server,jobobj.reference) - - -def get_obj_name(server, f_object, obj_reference): - """ - Return the object name from obj_reference - - engine: A Delphix engine object. 
- obj_reference: The object reference to retrieve the name - """ - - try: - obj_name = f_object.get(server, obj_reference) - return(obj_name.name) - - except RequestError as e: - raise dlpxExceptionHandler(e) - - except HttpError as e: - raise DlpxException(e) - - -def list_snapshots(server): - """ - List all snapshots with timestamps - """ - - header = 'Snapshot Name, First Change Point, Location, Latest Change Point' - snapshots = snapshot.get_all(server) - - print header - for snap in snapshots: - container_name = get_obj_name(server, database, snap.container) - snap_range = snapshot.timeflow_range(server, snap.reference) - - print '{}, {}, {}, {}, {}'.format(str(snap.name), - container_name, - snap_range.start_point.timestamp, - snap_range.start_point.location, - snap_range.end_point.timestamp) - - -@run_async -def main_workflow(engine): - """ - This function is where we create our main workflow. - Use the @run_async decorator to run this function asynchronously. - The @run_async decorator allows us to run against multiple Delphix Engine - simultaneously - """ - - #Pull out the values from the dictionary for this engine - engine_address = engine["ip_address"] - engine_username = engine["username"] - engine_password = engine["password"] - #Establish these variables as empty for use later - databases = [] - environment_obj = None - source_objs = None - jobs = {} - - - #Setup the connection to the Delphix Engine - server = serversess(engine_address, engine_username, engine_password) - - #If an environment/server was specified - if host_name: - print_debug(engine["hostname"] + ": Getting environment for " + - host_name) - #Get the environment object by the hostname - environment_obj = find_obj_by_name(engine, server, environment, - host_name) - - if environment_obj != None: - #Get all the sources running on the server - env_source_objs = source.get_all(server, - environment=environment_obj.reference) - - #If the server doesn't have any objects, exit. - if env_source_objs == None: - print_error(host_name + "does not have any objects. Exiting") - sys.exit(1) - - #If we are only filtering by the server, then put those objects in - # the main list for processing - if not(arguments['--group_name'] and database_name): - source_objs = env_source_objs - all_dbs = database.get_all(server, - no_js_container_data_source=True) - databases = [] - for source_obj in source_objs: - if source_obj.staging == False and \ - source_obj.virtual == True: - - database_obj = database.get(server, - source_obj.container) - - if database_obj in all_dbs: - databases.append(database_obj) - else: - print_error(engine["hostname"] + ":No environment found for " + - host_name + ". Exiting") - sys.exit(1) - - #If we specified a specific database by name.... - if arguments['--name']: - #Get the database object from the name - - database_obj = find_obj_by_name(engine, server, database, - arguments['--name']) - if database_obj: - databases.append(database_obj) - - #Else if we specified a group to filter by.... - elif arguments['--group_name']: - print_debug(engine["hostname"] + ":Getting databases in group " + - arguments['--group_name']) - #Get all the database objects in a group. - databases = find_all_databases_by_group_name(engine, server, - arguments['--group_name']) - - #Else if we specified a dSource to filter by.... - elif arguments['--dsource']: - print_debug(engine["hostname"] + ":Getting databases for dSource" + - arguments['--dsource']) - - #Get all the database objects in a group. 
- databases = find_all_databases_by_dsource_name(engine, server, - arguments['--dsource']) - - #Else, if we said all vdbs ... - elif arguments['--all_vdbs'] and not arguments['--host'] : - print_debug(engine['hostname'] + ':Getting all VDBs ') - - #Grab all databases, but filter out the database that are in JetStream - #containers, because we can't refresh those this way. - databases = database.get_all(server, no_js_container_data_source=True) - - elif arguments['--list_timeflows']: - list_timeflows(server) - - elif arguments['--list_snapshots']: - list_snapshots(server) - - #reset the running job count before we begin - i = 0 - with job_mode(server): - #While there are still running jobs or databases still to process.... - - while (len(jobs) > 0 or len(databases) > 0): - - #While there are databases still to process and we are still under - #the max simultaneous jobs threshold (if specified) - while len(databases) > 0 and (arguments['--parallel'] == None or \ - i < int(arguments['--parallel'])): - - #Give us the next database in the list, and then remove it - database_obj = databases.pop() - #Get the source of the database. - source_obj = find_source_by_database(engine, server, - database_obj) - - #If we applied the environment/server filter AND group filter, - # find the intersecting matches - if environment_obj != None and (arguments['--group_name']): - match = False - - for env_source_obj in env_source_objs: - if source_obj[0].reference in env_source_obj.reference: - match = True - break - if match == False: - print_error(engine["hostname"] + ": " + - database_obj.name + " does not exist on " + - host_name + ". Exiting") - return - - #Refresh the database - refresh_job = refresh_database(engine, server, jobs, - source_obj[0], database_obj) - #If refresh_job has any value, then we know that a job was - # initiated. - - if refresh_job: - #increment the running job count - i += 1 - #Check to see if we are running at max parallel processes, and - # report if so. - if ( arguments['--parallel'] != None and \ - i >= int(arguments['--parallel'])): - - print_info(engine["hostname"] + ": Max jobs reached (" + - str(i) + ")") - - i = update_jobs_dictionary(engine, server, jobs) - print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + - str(len(databases)) + " jobs waiting to run") - - #If we have running jobs, pause before repeating the checks. - if len(jobs) > 0: - sleep(float(arguments['--poll'])) - -def print_error(print_obj): - """ - Call this function with a log message to prefix the message with ERROR - """ - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - - -def print_warning(print_obj): - """ - Call this function with a log message to prefix the message with WARNING - """ - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - - -def refresh_database(engine, server, jobs, source_obj, container_obj): - """ - This function actually performs the refresh - engine: - server: Engine object - jobs: list containing running jobs - source_obj: source object used to refresh from snapshot or timeflow - container_obj: VDB container - """ - - #Sanity check to make sure our source object has a reference - if source_obj.reference: - #We can only refresh VDB's - if source_obj.virtual != True: - print_warning(engine["hostname"] + ": " + container_obj.name + - " is not a virtual object. Skipping.") - - #Ensure this source is not a staging database. We can't act upon those. 
- elif source_obj.staging == True: - print_warning(engine["hostname"] + ": " + container_obj.name + - " is a staging database. Skipping.") - - #Ensure the source is enabled. We can't refresh disabled databases. - elif source_obj.runtime.enabled == "ENABLED" : - source_db = database.get(server, container_obj.provision_container) - if not source_db: - print_error(engine["hostname"] + - ":Was unable to retrieve the source container for " - + container_obj.name) - print_info(engine["hostname"] + ": Refreshing " + - container_obj.name + " from " + source_db.name) - print_debug(engine["hostname"] + ": Type: " + source_obj.type ) - print_debug(engine["hostname"] + ":" + source_obj.type) - - #If the vdb is a Oracle type, we need to use a - # OracleRefreshParameters - - if str(container_obj.reference).startswith("ORACLE"): - refresh_params = OracleRefreshParameters() - else: - refresh_params = RefreshParameters() - - try: - refresh_params.timeflow_point_parameters = set_timeflow_point( - engine, server, - source_db) - print_debug(engine["hostname"] + ":" + str(refresh_params)) - - #Sync it - database.refresh(server, container_obj.reference, - refresh_params) - jobs[container_obj] = server.last_job - - except RequestError as e: - print '\nERROR: Could not set timeflow point:\n%s\n' % ( - e.message.action) - sys.exit(1) - - except DlpxException as e: - print 'ERROR: Could not set timeflow point:\n%s\n' % (e.message) - sys.exit(1) - - - #return the job object to the calling statement so that we can - # tell if a job was created or not (will return None, if no job) - return server.last_job - - #Don't do anything if the database is disabled - else: - print_warning(engine["hostname"] + ": " + container_obj.name + - " is not enabled. Skipping sync") - - -def run_job(engine): - """ - This function runs the main_workflow aynchronously against all the - servers specified - """ - - #Create an empty list to store threads we create. - threads = [] - #If the --all argument was given, run against every engine in dxtools.conf - - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - #For each server in the dxtools.conf... - for delphix_engine in dxtools_objects: - engine = dxtools_objects[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - else: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dxtools_objects[arguments['--engine']] - print_info("Executing against Delphix Engine: " + - arguments['--engine']) - except: - print_error("Delphix Engine \"" + arguments['--engine'] + "\" \ - cannot be found in " + config_file_path) - print_error("Please check your value and try again. Exiting") - sys.exit(1) - - #Else if the -d argument was given, test to see if the engine exists - # in dxtools.conf - elif arguments['-d']: - try: - engine = dxtools_objects[arguments['-d']] - print_info("Executing against Delphix Engine: " + - arguments['-d']) - except: - print_error("Delphix Engine \"" + arguments['-d'] + - "\" cannot be found in " + config_file_path) - print_error("Please check your value and try again. 
Exiting") - sys.exit(1) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dxtools_objects: - if dxtools_objects[delphix_engine]['default'] == 'true': - engine = dxtools_objects[delphix_engine] - print_info("Executing against the default Delphix Engine" \ - " in the dxtools.conf: " + - dxtools_objects[delphix_engine]['hostname']) - break - if engine == None: - print_error("No default engine found. Exiting") - sys.exit(1) - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def serversess(f_engine_address, f_engine_username, f_engine_password): - """ - Function to setup the session with the Delphix Engine - """ - server_session= DelphixEngine(f_engine_address, f_engine_username, - f_engine_password, "DOMAIN") - return server_session - - -def list_timeflows(server): - """ - Retrieve and print all timeflows for a given engine - """ - - ret_timeflow_dct = {} - all_timeflows = timeflow.get_all(server) - - print 'DB Name, Timeflow Name, Timestamp' - - for tfbm_lst in all_timeflows: - try: - - db_name = get_obj_name(server, database, tfbm_lst.container) - print '%s, %s, %s\n' % (str(db_name), str(tfbm_lst.name), - str(tfbm_lst.parent_point.timestamp)) - - except AttributeError: - print '%s, %s\n' % (str(tfbm_lst.name), str(db_name)) - - except TypeError as e: - raise DlpxException('Listing Timeflows encountered an error:\n%s' % - (e.message)) - - except RequestError, e: - dlpx_err = e.message - raise DlpxException(dlpx_err.action) - - -def set_timeflow_point(engine, server, container_obj): - """ - This returns the reference of the timestamp specified. 
- engine: - server: Delphix Engine object - container_obj: VDB object - """ - - if arguments['--timestamp_type'].upper() == "SNAPSHOT": - if arguments['--timestamp'].upper() == "LATEST": - print_debug(engine["hostname"] + ": Using the latest Snapshot") - timeflow_point_parameters = TimeflowPointSemantic() - timeflow_point_parameters.location = "LATEST_SNAPSHOT" - - elif arguments['--timestamp'].startswith("@"): - print_debug(engine["hostname"] + ": Using a named snapshot") - snapshot_obj = find_snapshot_by_database_and_name(engine, server, - container_obj, - arguments['--timestamp']) - - if snapshot_obj: - timeflow_point_parameters=TimeflowPointLocation() - timeflow_point_parameters.timeflow = snapshot_obj.timeflow - timeflow_point_parameters.location = \ - snapshot_obj.latest_change_point.location - - else: - raise DlpxException('ERROR: Was unable to use the specified ' - 'snapshot %s for database %s.\n' % - (arguments['--timestamp'], - container_obj.name)) - - elif arguments['--timestamp']: - print_debug(engine["hostname"] + - ": Using a time-designated snapshot") - snapshot_obj = find_snapshot_by_database_and_time( - engine, server, container_obj, - arguments['--timestamp']) - - if snapshot_obj: - timeflow_point_parameters=TimeflowPointTimestamp() - timeflow_point_parameters.timeflow = snapshot_obj.timeflow - timeflow_point_parameters.timestamp = \ - snapshot_obj.latest_change_point.timestamp - - else: - raise DlpxException('Was unable to find a suitable time' - ' for %s for database %s' % - (arguments['--timestamp'], - container_obj.name)) - - elif arguments['--timestamp_type'].upper() == "TIME": - - if arguments['--timestamp'].upper() == "LATEST": - timeflow_point_parameters = TimeflowPointSemantic() - timeflow_point_parameters.location = "LATEST_POINT" - - elif arguments['--timestamp']: - timeflow_point_parameters = TimeflowPointTimestamp() - timeflow_point_parameters.type = 'TimeflowPointTimestamp' - timeflow_obj = find_obj_by_name(engine, server, timeflow, - arguments['--timeflow']) - - timeflow_point_parameters.timeflow = timeflow_obj.reference - timeflow_point_parameters.timestamp = arguments['--timestamp'] - return timeflow_point_parameters - else: - raise DlpxException(arguments['--timestamp_type'] + - " is not a valied timestamp_type. Exiting") - - timeflow_point_parameters.container = container_obj.reference - return timeflow_point_parameters - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - - -def update_jobs_dictionary(engine, server, jobs): - """ - This function checks each job in the dictionary and updates its status or - removes it if the job is complete. - Return the number of jobs still running. - """ - #Establish the running jobs counter, as we are about to update the count - # from the jobs report. - i = 0 - #get all the jobs, then inspect them - for j in jobs.keys(): - job_obj = job.get(server, jobs[j]) - print_debug(engine["hostname"] + ": " + str(job_obj)) - print_info(engine["hostname"] + ": " + j.name + ": " + - job_obj.job_state) - - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from the running - # jobs list. - del jobs[j] - else: - #If the job is in a running state, increment the running job count. 
- i += 1 - return i - - -def main(argv): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global host_name - global database_name - global config_file_path - global dxtools_objects - - try: - #Declare globals that will be used throughout the script. - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - engine = None - single_thread = False - database_name = arguments['--name'] - host_name = arguments['--host'] - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dxtools_objects = get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. - run_job(engine) - - elapsed_minutes = time_elapsed() - print_info("script took " + str(elapsed_minutes) + - " minutes to get this far.") - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) - sys.exit(2) - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that we - have actionable data - """ - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + - " minutes to get this far.") - sys.exit(3) - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + - " minutes to get this far.") - except: - """ - Everything else gets caught here - """ - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + - " minutes to get this far.") - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - print "THIS SCRIPT IS DEPRECATED. USE dx_refresh_vdb.py, instead" - sys.exit(1) - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - #Feed our arguments to the main function, and off we go! - main(arguments) diff --git a/dx_refresh_vdb.py b/dx_refresh_vdb.py index 76ebb97..cda0ff3 100755 --- a/dx_refresh_vdb.py +++ b/dx_refresh_vdb.py @@ -1,35 +1,33 @@ -#!/usr/bin/env python -#Adam Bowen - Apr 2016 -#This script refreshes a vdb +#!/usr/bin/env python3 +# Adam Bowen - Apr 2016 +# This script refreshes a vdb # Updated by Corey Brune Oct 2016 -#requirements -#pip install --upgrade setuptools pip docopt delphixpy.v1_8_0 +# requirements +# pip install --upgrade setuptools pip docopt delphixpy -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. This thing is brilliant. +# The below doc follows the POSIX compliant standards and allows us to use +# this doc to also define our ARGUMENTS for the script. 
"""Refresh a vdb Usage: - dx_refresh_vdb.py (--vdb | --dsource | --all_vdbs [--group_name ]| --host | --list_timeflows | --list_snapshots) - [--timestamp_type ] - [--timestamp --timeflow ] - [-d | --engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] + dx_refresh_vdb.py --vdb + [--timestamp_type --timestamp ] + [--timeflow --engine ] + [--debug] [--poll --single_thread ] + [--config --logdir ] dx_refresh_vdb.py -h | --help | -v | --version + Refresh a Delphix VDB + Examples: - dx_refresh_vdb.py --vdb "aseTest" --group_name "Analytics" - dx_refresh_vdb.py --dsource "dlpxdb1" - dx_refresh_vdb.py --all_vdbs --host LINUXSOURCE --parallel 4 --debug -d landsharkengine - dx_refresh_vdb.py --all_vdbs --group_name "Analytics" --all + dx_refresh_vdb.py --vdb "aseTest" + dx_refresh_vdb.py --vdb testdb1 --timestamp @2021-02-02T20:33:59.052Z --timestamp_type SNAPSHOT + dx_refresh_vdb.py --vdb testdb1 --timestamp 2021-02-04T04:43:58.000Z --timestamp_type TIME + Options: - --vdb Name of the object you are refreshing. - --all_vdbs Refresh all VDBs that meet the filter criteria. - --dsource Name of dsource in Delphix to execute against. - --group_name Name of the group to execute against. - --list_timeflows List all timeflows - --list_snapshots List all snapshots - --host Name of environment in Delphix to execute against. + --vdb Name of the object you are refreshing. + --single_thread Run as a single thread. False if running multiple + threads. + [default: True] --timestamp_type The type of timestamp you are specifying. Acceptable Values: TIME, SNAPSHOT [default: SNAPSHOT] @@ -43,390 +41,232 @@ snapshot time from GUI: "YYYY-MM-DD HH24:MI" [default: LATEST] --timeflow Name of the timeflow to refresh a VDB - -d Identifier of Delphix engine in dxtools.conf. - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. + --engine Alt Identifier of Delphix DDP in dxtools.conf. + all|engine-name + [default: default] --debug Enable debug logging - --parallel Limit number of jobs to maxjob --poll The number of seconds to wait between job polls [default: 10] --config The path to the dxtools.conf file - [default: ./dxtools.conf] + [default: ./config/dxtools.conf] --logdir The path to the logfile you want to use. - [default: ./dx_refresh_db.log] + [default: ./logs/dx_refresh_db.log] -h --help Show this screen. -v --version Show version. 
""" -VERSION = "v.0.3.004" +import sys +import time +from os.path import basename +import docopt -from docopt import docopt -from os.path import basename -import sys -from time import time, sleep -import traceback +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import dx_timeflow +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import environment -from delphixpy.v1_8_0.web import group -from delphixpy.v1_8_0.web import source -from delphixpy.v1_8_0.web import timeflow -from delphixpy.v1_8_0.web.snapshot import snapshot -from delphixpy.v1_8_0.web.vo import OracleRefreshParameters -from delphixpy.v1_8_0.web.vo import RefreshParameters -from delphixpy.v1_8_0.web.vo import TimeflowPointLocation -from delphixpy.v1_8_0.web.vo import TimeflowPointSemantic -from delphixpy.v1_8_0.web.vo import TimeflowPointTimestamp +VERSION = "v.0.3.004" -from lib.DlpxException import DlpxException -from lib.DxTimeflow import DxTimeflow -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_source_by_dbname -from lib.GetSession import GetSession -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug -from lib.DxLogging import print_exception -def refresh_database(vdb_name,timestamp, timestamp_type='SNAPSHOT'): +def refresh_vdb(dlpx_obj, vdb_name, timestamp, timestamp_type="SNAPSHOT"): """ This function actually performs the refresh engine: - dlpx_obj: Virtualization Engine session object - vdb_name: VDB to be refreshed + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param vdb_name: VDB to be refreshed + :type vdb_name: str + :param timestamp: The Delphix semantic for the point in time on the + source from which to refresh the VDB + :type timestamp: str + :param timestamp_type: either SNAPSHOT or TIME + :type timestamp_type: str """ - - #Sanity check to make sure our source object has a reference - dx_timeflow_obj = DxTimeflow(dx_session_obj.server_session) - container_obj = find_obj_by_name(dx_session_obj.server_session, database, - vdb_name) - source_obj = find_source_by_dbname(dx_session_obj.server_session, database, - vdb_name) - + dx_timeflow_obj = dx_timeflow.DxTimeflow(dlpx_obj.server_session) + dx_logging.print_info(f" Refreshing {vdb_name}") + container_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, vdb_name + ) + source_obj = get_references.find_source_by_db_name( + dlpx_obj.server_session, vdb_name + ) # Sanity check to make sure our container object has a reference if container_obj.reference: try: - if container_obj.virtual is not True: - raise DlpxException('{} is not a virtual object. ' - 'Skipping.\n'.format(container_obj.name)) - elif container_obj.staging is True: - raise DlpxException('{} is a virtual object. 
' - 'Skipping.\n'.format(container_obj.name)) + if container_obj.virtual is not True or container_obj.staging is True: + dx_logging.print_exception( + f"{container_obj.name} is not a virtual object.\n" + ) elif container_obj.runtime.enabled == "ENABLED": - print_info('\nINFO: Refrshing {} to {}\n'.format( - container_obj.name, timestamp)) - - # This exception is raised if rewinding a vFiles VDB - # since AppDataContainer does not have virtual, staging or - # enabled attributes. + dx_logging.print_info( + f"INFO: Refreshing {container_obj.name} " f"to {timestamp}\n" + ) + # This exception is raised if refreshing a vFiles VDB since + # AppDataContainer does not have virtual, staging or enabled attributes except AttributeError: pass - if source_obj.reference: - #We can only refresh VDB's - if source_obj.virtual != True: - print_info('\nINFO: {} is not a virtual object. Skipping.\n'.format( - container_obj.name)) - - #Ensure this source is not a staging database. We can't act upon those. - elif source_obj.staging == True: - print_info('\nINFO: {} is a staging database. Skipping.\n'.format( - container_obj.name)) - - #Ensure the source is enabled. We can't refresh disabled databases. - elif source_obj.runtime.enabled == "ENABLED" : - source_db = database.get(dx_session_obj.server_session, container_obj.provision_container) - if not source_db: - print_error('\nERROR: Was unable to retrieve the source container for {} \n'.format( - container_obj.name)) - print_info('\nINFO: Refreshing {} from {}\n'.format( - container_obj.name, source_db.name)) - - #If the vdb is a Oracle type, we need to use a - # OracleRefreshParameters - ''' - rewind_params = RollbackParameters() - rewind_params.timeflow_point_parameters = \ - dx_timeflow_obj.set_timeflow_point(container_obj, timestamp_type, - timestamp) - print_debug('{}: {}'.format(engine_name, str(rewind_params))) - ''' - if str(container_obj.reference).startswith("ORACLE"): - refresh_params = OracleRefreshParameters() - else: - refresh_params = RefreshParameters() - - try: - refresh_params.timeflow_point_parameters = \ - dx_timeflow_obj.set_timeflow_point(source_db, timestamp_type, - timestamp) - print_info('\nINFO: Refresh prams {}\n'.format( - refresh_params)) - - #Sync it - database.refresh(dx_session_obj.server_session, container_obj.reference, - refresh_params) - dx_session_obj.jobs[dx_session_obj.server_session.address] = \ - dx_session_obj.server_session.last_job - - except RequestError as e: - print '\nERROR: Could not set timeflow point:\n%s\n' % ( - e.message.action) - sys.exit(1) - - except DlpxException as e: - print 'ERROR: Could not set timeflow point:\n%s\n' % (e.message) - sys.exit(1) - - #Don't do anything if the database is disabled + try: + source_db = database.get( + dlpx_obj.server_session, container_obj.provision_container + ) + except (exceptions.RequestError, exceptions.JobError) as err: + raise dlpx_exceptions.DlpxException( + f"Encountered error while refreshing {vdb_name}:\n{err}" + ) + + if str(container_obj.reference).startswith("ORACLE"): + refresh_params = vo.OracleRefreshParameters() else: - print_info('\nINFO: {} is not enabled. Skipping sync.\n'.format( - container_obj.name)) - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). 
- Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl + refresh_params = vo.RefreshParameters() + refresh_params.timeflow_point_parameters = dx_timeflow_obj.set_timeflow_point( + source_db, timestamp_type, timestamp + ) + try: + database.refresh( + dlpx_obj.server_session, container_obj.reference, refresh_params + ) + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) + except (dlpx_exceptions.DlpxException, exceptions.RequestError) as err: + dx_logging.print_exception(f"ERROR: Could not set timeflow point:{err}") + raise dlpx_exceptions.DlpxException( + f"ERROR: Could not set timeflow point:{err}" + ) + # Don't do anything if the database is disabled + else: + dx_logging.print_info( + f"INFO: {container_obj.name} is not enabled. Refresh will not continue.\n" + ) - return async_func @run_async -def main_workflow(engine): +def main_workflow(engine, dlpx_obj, single_thread): """ - This function actually runs the jobs. + This function is where we create our main workflow. Use the @run_async decorator to run this function asynchronously. - This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool """ - jobs = {} - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - with dx_session_obj.job_mode(single_thread): - while len(dx_session_obj.jobs) > 0 or len(thingstodo) > 0: - if len(thingstodo)> 0: - refresh_database(arguments['--vdb'], - arguments['--timestamp'], - arguments['--timestamp_type']) - thingstodo.pop() - - #get all the jobs, then inspect them - i = 0 - for j in dx_session_obj.jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, - dx_session_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Operations: {}'.format(engine['hostname'], - job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from the - # running jobs list. - del dx_session_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - #If the job is in a running state, increment the running - # job count. - i += 1 - - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - - #If we have running jobs, pause before repeating the checks. - if len(dx_session_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. 
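+# A minimal sketch of driving refresh_vdb() directly, outside the docopt and
+# threading plumbing in this script (the engine address and credentials are
+# placeholders, and the config path is the repository default; job polling is
+# normally handled afterwards by run_job.track_running_jobs):
+#
+#     dlpx = get_session.GetSession()
+#     dlpx.get_config("./config/dxtools.conf")
+#     dlpx.dlpx_session("10.0.0.10", "admin", "password", "True")
+#     refresh_vdb(dlpx, "aseTest", "LATEST", "SNAPSHOT")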
- threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - - else: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - (arguments['--engine']))) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: dx_refresh_vdb encountered an error authenticating to " + f'{engine["hostname"]} {ARGUMENTS["--target"]}:\n{err}\n' + ) + try: + with dlpx_obj.job_mode(single_thread): + vdb_list = ARGUMENTS["--vdb"].split(":") + for vdb_name in vdb_list: + dx_logging.print_info(f"main_workflow(): refresh {vdb_name}") + refresh_vdb( + dlpx_obj, + vdb_name, + ARGUMENTS["--timestamp"], + ARGUMENTS["--timestamp_type"], + ) + dx_logging.print_info(f"main_workflow(): All refreshes must be running now") + run_job.track_running_jobs(engine, dlpx_obj) + except ( + dlpx_exceptions.DlpxException, + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Error in dx_refresh_vdb:" f'{engine["ip_address"]}\n{err}' + ) + + +def main(): """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time + main function - creates session and runs jobs """ - #elapsed_minutes = round((time() - time_start)/60, +1) - #return elapsed_minutes - return round((time() - time_start)/60, +1) - - -def main(arguments): - #We want to be able to call on these variables anywhere in the script. 
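+# Flow of the main_workflow() above: each refresh_vdb() call records the
+# engine's last_job in dlpx_obj.jobs, and run_job.track_running_jobs() then
+# takes over the job polling that the explicit loop removed from this script
+# used to perform.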
- global single_thread - global usebackup - global time_start - global config_file_path - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - + time_start = time.time() try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - engine = None - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for + # This is the function that will handle processing main_workflow for # all the servers. - run_job() - - #elapsed_minutes = time_elapsed() - print_info('script took {:.2f} minutes to get this far.'.format( - time_elapsed())) - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message:\n{}\n').format(e) - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('{} took {:.2f} minutes to get this far:\n{}\n'.format( - basename(__file__), elapsed_minutes, e)) + for each in run_job.run_job_mt( + main_workflow, dx_session_obj, engine, single_thread + ): + # join them back together so that we wait for all threads to + # complete + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"script took {elapsed_minutes} minutes to " f"get this far." + ) + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call. + dx_logging.print_exception( + f"ERROR: Please check the ERROR message " f"below:\n {err.error}" + ) + sys.exit(2) + + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) + sys.exit(2) + + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." 
+ f"{basename(__file__)} took {elapsed_minutes} minutes to get " + f"this far" + ) sys.exit(3) except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to get this far." + ) - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! + main() diff --git a/dx_replication.py b/dx_replication.py deleted file mode 100755 index c5999de..0000000 --- a/dx_replication.py +++ /dev/null @@ -1,421 +0,0 @@ -#!/usr/bin/env python -# Corey Brune - Feb 2017 -#Description: -# This script will setup replication between two hosts. -# -#Requirements -#pip install docopt delphixpy.v1_8_0 - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. -"""Description -Usage: - dx_replication.py --rep_name --target_host --target_user --target_pw --rep_objs [--schedule --bandwidth --num_cons --enabled] - dx_replication.py --delete - dx_replication.py --execute - dx_replication.py --list - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - - dx_replication.py -h | --help | -v | --version - -Description -Setup replication between two hosts. -Examples: -dx_replication.py --rep_name mytest --target_host 172.16.169.141 --target_user delphix_admin --target_pw delphix --rep_objs mytest1 --schedule '55 0 19 * * ?' --enabled -dx_replication.py --rep_name mytest --target_host 172.16.169.141 --target_user delphix_admin --target_pw delphix --rep_objs mytest1 --schedule '0 40 20 */4 * ?' --bandwidth 5 --num_cons 2 --enabled - -dx_replication.py --delete mytest - -Options: - --rep_name Name of the replication job. - --target_host Name / IP of the target replication host. - --target_user Username for the replication target host. - --target_pw Password for the user. - --schedule Schedule of the replication job in crontab format. (seconds, minutes, hours, day of month, month) - [default: '0 0 0 */5 * ?'] - --rep_objs Comma delimited list of objects to replicate. - --delete Name of the replication job to delete. - --bandwidth Limit bandwidth to MB/s. - --num_cons Number of network connections for the replication job. - --list List all of the replication jobs. - --execute Name of the replication job to execute. - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. 
- --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_operations_vdb.log] - -h --help Show this screen. - -v --version Show version. -""" - -VERSION = 'v.0.0.002' - -import sys -from os.path import basename -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web.replication import spec -from delphixpy.v1_8_0.web.vo import ReplicationSpec -from delphixpy.v1_8_0.web.vo import ReplicationList - -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_obj_specs -from lib.GetSession import GetSession - - -def create_replication_job(): - """ - Create a replication job - :return: Reference to the spec object - """ - rep_spec = ReplicationSpec() - rep_spec.name = arguments['--rep_name'] - rep_spec.target_host = arguments['--target_host'] - rep_spec.target_principal = arguments['--target_user'] - rep_spec.target_credential = {'type': 'PasswordCredential', 'password': - arguments['--target_pw']} - rep_spec.object_specification = ReplicationList() - rep_spec.schedule = arguments['--schedule'] - rep_spec.encrypted = True - - if arguments['--num_cons']: - rep_spec.number_of_connections = int(arguments['--num_cons']) - if arguments['--bandwidth']: - rep_spec.bandwidth_limit = int(arguments['--bandwidth']) - if arguments['--enabled']: - rep_spec.enabled = True - try: - rep_spec.object_specification.objects = find_obj_specs( - dx_session_obj.server_session, arguments['--rep_objs'].split(',')) - - ref = spec.create(dx_session_obj.server_session, rep_spec) - if dx_session_obj.server_session.last_job: - dx_session_obj.jobs[dx_session_obj.server_session.address] = \ - dx_session_obj.server_session.last_job - print_info('Successfully created {} with reference ' - '{}\n'.format(arguments['--rep_name'], ref)) - - except (HttpError, RequestError, DlpxException) as e: - print_exception('Could not create replication job {}:\n{}'.format( - arguments['--rep_name'], e)) - - -def delete_replication_job(): - """ - Delete a replication job. 
- :return: Reference to the spec object - """ - try: - spec.delete(dx_session_obj.server_session, - find_obj_by_name(dx_session_obj.server_session, spec, - arguments['--delete']).reference) - if dx_session_obj.server_session.last_job: - dx_session_obj.jobs[dx_session_obj.server_session.address] = \ - dx_session_obj.server_session.last_job - print_info('Successfully deleted {}.\n'.format(arguments['--delete'])) - - except (HttpError, RequestError, DlpxException) as e: - print_exception('Was not able to delete {}:\n{}'.format( - arguments['--delete'], e)) - - -def list_replication_jobs(): - """ - List the replication jobs on a given engine - """ - obj_names_lst = [] - - for rep_job in spec.get_all(dx_session_obj.server_session): - for obj_spec_ref in rep_job.object_specification.objects: - obj_names_lst.append(database.get(dx_session_obj.server_session, - obj_spec_ref).name) - - print('Name: {}\nReplicated Objects: {}\nEnabled: {}\nEncrypted: {}\n' - 'Reference: {}\nSchedule: {}\nTarget Host: {}\n\n'.format( - rep_job.name, ', '.join(obj_names_lst), rep_job.enabled, - rep_job.encrypted, rep_job.reference, rep_job.schedule, - rep_job.target_host)) - - -def execute_replication_job(obj_name): - """ - Execute a replication job immediately. - :param obj_name: name of object to execute. - """ - try: - spec.execute(dx_session_obj.server_session, - find_obj_by_name(dx_session_obj.server_session, - spec, obj_name).reference) - if dx_session_obj.server_session.last_job: - dx_session_obj.jobs[dx_session_obj.server_session.address] = \ - dx_session_obj.server_session.last_job - print_info('Successfully executed {}.\n'.format(obj_name)) - except (HttpError, RequestError, DlpxException, JobError) as e: - print_exception('Could not execute job {}:\n{}'.format(obj_name, e)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - """ - - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dx_session_obj.job_mode(single_thread): - while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): - if len(thingstodo) > 0: - if arguments['--rep_name']: - create_replication_job() - elif arguments['--delete']: - delete_replication_job() - elif arguments['--list']: - list_replication_jobs() - elif arguments['--execute']: - execute_replication_job(arguments['--execute']) - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dx_session_obj.jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, - dx_session_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Replication operations: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the - # running jobs list. - del dx_session_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dx_session_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - except (HttpError, RequestError, JobError, DlpxException) as e: - print_exception('ERROR: Could not complete replication' - ' operation:{}'.format(e)) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. 
Exiting.\n'.format( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - return round((time() - time_start)/60, +1) - - -def main(arguments): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. - run_job() - - elapsed_minutes = time_elapsed() - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message:\n{}'.format(e)) - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('{} took {:.2f} minutes to get this far\n{}'.format( - basename(__file__), elapsed_minutes, e)) - sys.exit(3) - - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! 
- main(arguments) diff --git a/dx_rewind_vdb.py b/dx_rewind_vdb.py index 5a23a4d..32b9d9a 100755 --- a/dx_rewind_vdb.py +++ b/dx_rewind_vdb.py @@ -1,19 +1,19 @@ -#!/usr/bin/env python -#Corey Brune - Sep 2016 -#This script performs a rewind of a vdb -#requirements -#pip install --upgrade setuptools pip docopt delphixpy.v1_8_0 +#!/usr/bin/env python3 +# Corey Brune - Sep 2016 +# This script performs a rewind of a vdb +# requirements +# pip install --upgrade setuptools pip docopt delphixpy.v1_8_0 -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. +# The below doc follows the POSIX compliant standards and allows us to use +# this doc to also define ARGUMENTS for the script. """Rewinds a vdb Usage: - dx_rewind_vdb.py (--vdb [--timestamp_type ] [--timestamp ]) - [--bookmark ] - [ --engine --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] + dx_rewind_vdb.py (--vdb [--timestamp_type ] \ + [--timestamp ]) + [--bookmark --engine --parallel ] \ + [--poll --config --single_thread ] \ + [--logdir ] dx_rewind_vdb.py -h | --help | -v | --version Rewinds a Delphix VDB @@ -21,8 +21,9 @@ Rollback to latest snapshot using defaults: dx_rewind_vdb.py --vdb testVdbUF Rollback using a specific timestamp: - dx_rewind_vdb.py --vdb testVdbUF --timestamp_type snapshot --timestamp 2016-11-15T11:30:17.857Z - + dx_rewind_vdb.py --vdb testVdbUF --timestamp_type snapshot \ + --timestamp 2016-11-15T11:30:17.857Z + Options: --vdb Name of VDB to rewind @@ -30,7 +31,6 @@ --timestamp_type The type of timestamp being used for the reqwind. Acceptable Values: TIME, SNAPSHOT [default: SNAPSHOT] - --all Run against all engines. --timestamp The Delphix semantic for the point in time on the source to rewind the VDB. @@ -40,347 +40,223 @@ snapshot name: "@YYYY-MM-DDTHH24:MI:SS.ZZZ" snapshot time from GUI: "YYYY-MM-DD HH24:MI" [default: LATEST] - --engine Alt Identifier of Delphix engine in dxtools.conf. - --debug Enable debug logging + --engine Identifier of Delphix engine in dxtools.conf. + [default: default] --parallel Limit number of jobs to maxjob --poll The number of seconds to wait between job polls [default: 10] --config The path to the dxtools.conf file - [default: ./dxtools.conf] + [default: ./config/dxtools.conf] --logdir The path to the logfile you want to use. - [default: ./dx_rewind_vdb.log] + [default: ./logs/dx_rewind_vdb.log] -h --help Show this screen. -v --version Show version. 
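+  --single_thread       Run as a single thread. False if running multiple
+                        threads.
+                        [default: True]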
""" -VERSION = "v.0.2.016" - - -from docopt import docopt -from os.path import basename import sys -from time import time, sleep -import traceback +import time +from os.path import basename -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web.vo import RollbackParameters -from delphixpy.v1_8_0.web.vo import OracleRollbackParameters +import docopt -from lib.DlpxException import DlpxException -from lib.DxTimeflow import DxTimeflow -from lib.GetReferences import find_obj_by_name -from lib.GetSession import GetSession -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug -from lib.DxLogging import print_exception +from delphixpy.v1_8_0.web import vo +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from lib import dlpx_exceptions +from lib import dx_logging +from lib import dx_timeflow +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async +VERSION = "v.0.3.005" -def rewind_database(dlpx_obj, vdb_name, timestamp, timestamp_type='SNAPSHOT'): - """ - This function performs the rewind (rollback) - dlpx_obj: Virtualization Engine session object - vdb_name: VDB to be rewound - timestamp: Point in time to rewind the VDB - timestamp_type: The type of timestamp being used for the rewind +def rewind_database(dlpx_obj, vdb_name, timestamp, timestamp_type="SNAPSHOT"): """ - - engine_name = dlpx_obj.dlpx_engines.keys()[0] - dx_timeflow_obj = DxTimeflow(dlpx_obj.server_session) - container_obj = find_obj_by_name(dlpx_obj.server_session, database, - vdb_name) + Performs the rewind (rollback) of a VDB + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession object + :param vdb_name: VDB to be rewound + :type vdb_name: str + :param timestamp: Point in time to rewind the VDB + :type timestamp: str + :param timestamp_type: The type of timestamp being used for the rewind + :type timestamp_type: str + """ + engine_name = list(dlpx_obj.dlpx_ddps)[0] + dx_timeflow_obj = dx_timeflow.DxTimeflow(dlpx_obj.server_session) + container_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, database, vdb_name + ) # Sanity check to make sure our container object has a reference if container_obj.reference: try: - if container_obj.virtual is not True: - raise DlpxException('{} in engine {} is not a virtual object. ' - 'Skipping.\n'.format(container_obj.name, - engine_name)) - elif container_obj.staging is True: - raise DlpxException('{} in engine {} is a virtual object. ' - 'Skipping.\n'.format(container_obj.name, - engine_name)) - elif container_obj.runtime.enabled == "ENABLED": - print_info('\nINFO: {} Rewinding {} to {}\n'.format( - engine_name, container_obj.name, timestamp)) - - # This exception is raised if rewinding a vFiles VDB - # since AppDataContainer does not have virtual, staging or - # enabled attributes. + if container_obj.runtime.enabled == "ENABLED": + dx_logging.print_info( + f"INFO: {engine_name} Rewinding " + f"{container_obj.name} to {timestamp}\n" + ) + elif container_obj.virtual is not True or container_obj.staging is True: + raise dlpx_exceptions.DlpxException( + f"{container_obj.name} in engine {engine_name} is not " + f"a virtual object. 
Skipping.\n" + ) + # This exception is raised if rewinding a vFiles VDB since + # AppDataContainer does not have virtual, staging or enabled attributes except AttributeError: pass - - print_debug('{}: Type: {}'.format(engine_name, container_obj.type)) - - # If the vdb is a Oracle type, we need to use a OracleRollbackParameters + # If the vdb is a Oracle type, we need to use a + # OracleRollbackParameters if str(container_obj.reference).startswith("ORACLE"): - rewind_params = OracleRollbackParameters() + rewind_params = vo.OracleRollbackParameters() else: - rewind_params = RollbackParameters() - rewind_params.timeflow_point_parameters = \ - dx_timeflow_obj.set_timeflow_point(container_obj, timestamp_type, - timestamp) - print_debug('{}: {}'.format(engine_name, str(rewind_params))) + rewind_params = vo.RollbackParameters() + rewind_params.timeflow_point_parameters = dx_timeflow_obj.set_timeflow_point( + container_obj, timestamp_type, timestamp + ) try: # Rewind the VDB - database.rollback(dlpx_obj.server_session, container_obj.reference, - rewind_params) + database.rollback( + dlpx_obj.server_session, container_obj.reference, rewind_params + ) dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('VDB {} was rolled back.'.format(container_obj.name)) - except (RequestError, HttpError, JobError) as e: - print_exception('ERROR: {} encountered an error on {}' - ' during the rewind process:\n{}'.format( - engine_name, container_obj.name, e)) + except ( + exceptions.RequestError, + exceptions.HttpError, + exceptions.JobError, + ) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: {engine_name} encountered an error on " + f"{container_obj.name} during the rewind process:\n{err}" + ) # Don't do anything if the database is disabled else: - print_info('{}: {} is not enabled. Skipping sync.'.format(engine_name, - container_obj.name)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func + dx_logging.print_info( + f'{engine_name}: {container_obj.name} is not " f"enabled. Skipping sync.' + ) @run_async -def main_workflow(engine, dlpx_obj): +def main_workflow(engine, dlpx_obj, single_thread): """ This function is where we create our main workflow. Use the @run_async decorator to run this function asynchronously. 
The @run_async decorator allows us to run against multiple Delphix Engine simultaneously - :param engine: Dictionary of engines :type engine: dictionary - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool """ - try: - #Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - except DlpxException as e: - print_exception('ERROR: Engine {} encountered an error while' - 'rewinding {}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - - thingstodo = ["thingtodo"] + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: dx_rewind_vdb encountered an error authenticating to " + f'{engine["hostname"]} {ARGUMENTS["--target"]}:\n{err}\n' + ) + thingstodo = ["thingstodo"] try: with dlpx_obj.job_mode(single_thread): - while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: - if len(thingstodo) > 0: - rewind_database(dlpx_obj, arguments['--vdb'], - arguments['--timestamp'], - arguments['--timestamp_type']) + while dlpx_obj.jobs or thingstodo: + if thingstodo: + rewind_database( + dlpx_obj, + ARGUMENTS["--vdb"], + ARGUMENTS["--timestamp"], + ARGUMENTS["--timestamp_type"], + ) thingstodo.pop() - - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - job_obj = job.get(dlpx_obj.server_session, - dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Refresh of {}: {}'.format( - engine['hostname'], arguments['--vdb'], - job_obj.job_state)) - if job_obj.job_state in ['CANCELED', 'COMPLETED', 'FAILED']: - # If the job is in a non-running state, remove it - # from the running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - except (DlpxException, RequestError, JobError, HttpError) as e: - print_exception('Error in dx_rewind_vdb: {}\n{}'.format( - engine['hostname'], e)) - sys.exit(1) + run_job.find_job_state(engine, dlpx_obj) + except ( + dlpx_exceptions.DlpxException, + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Error in dx_rewind_vdb:" f'{engine["ip_address"]}\n{err}' + ) -def time_elapsed(time_start): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - - time_start: float containing start time of the script. - """ - return round((time() - time_start)/60, +1) - - -def run_job(dlpx_obj, config_file_path): +def main(): """ - This function runs the main_workflow aynchronously against all the - servers specified - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param config_file_path: string containing path to configuration file. 
- :type config_file_path: str + main function - creates session and runs jobs """ - - # Create an empty list to store threads we create. - threads = [] - engine = None - - # If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info('Executing against all Delphix Engines in the dxtools.conf') - try: - # For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - # Create a new thread and add it to the list. - threads.append(main_workflow(engine, dlpx_obj)) - except DlpxException as e: - print_exception('Error encountered in run_job():\n{}'.format(e)) - sys.exit(1) - - elif arguments['--all'] is False: - # Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dlpx_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - except (DlpxException, RequestError, KeyError): - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value and' - ' try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - else: - # Else search for a default engine in the dxtools.conf - for delphix_engine in dlpx_obj.dlpx_engines: - if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': - engine = dlpx_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine is None: - raise DlpxException('\nERROR: No default engine found. Exiting') - - # run the job against the engine - threads.append(main_workflow(engine, dlpx_obj)) - - # For each thread in the list... - for each in threads: - # join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def main(): - # We want to be able to call on these variables anywhere in the script. - global single_thread - global debug - - time_start = time() - single_thread = False - + time_start = time.time() try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - config_file_path = arguments['--config'] - # Parse the dxtools.conf and put it into a dictionary + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] dx_session_obj.get_config(config_file_path) - # This is the function that will handle processing main_workflow for # all the servers. - run_job(dx_session_obj, config_file_path) - - elapsed_minutes = time_elapsed(time_start) - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - + for each in run_job.run_job( + main_workflow, dx_session_obj, engine, single_thread + ): + # join them back together so that we wait for all threads to + # complete + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"script took {elapsed_minutes} minutes to " f"get this far." + ) # Here we handle what we do when the unexpected happens - except SystemExit as e: + except SystemExit as err: # This is what we use to handle our sys.exit(#) - sys.exit(e) - - except DlpxException as e: - # We use this exception handler when an error occurs in a function call. 
- print_exception('ERROR: Please check the ERROR message below:\n' - '{}'.format(e.message)) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call. + dx_logging.print_exception( + f"ERROR: Please check the ERROR message " f"below:\n {err.error}" + ) sys.exit(2) - except HttpError as e: + except exceptions.HttpError as err: # We use this exception handler when our connection to Delphix fails - print_exception('ERROR: Connection failed to the Delphix Engine. Please' - 'check the ERROR message below:\n{}'.format(e.message)) + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) sys.exit(2) - except JobError as e: + except exceptions.JobError as err: # We use this exception handler when a job fails in Delphix so that we # have actionable data - print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) - elapsed_minutes = time_elapsed(time_start) - print_exception('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." + f"{basename(__file__)} took {elapsed_minutes} minutes to get " + f"this far" + ) sys.exit(3) except KeyboardInterrupt: # We use this exception handler to gracefully handle ctrl+c exits - print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed(time_start) - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - except: - # Everything else gets caught here - print_exception('{}\n{}'.format(sys.exc_info()[0], - traceback.format_exc())) - elapsed_minutes = time_elapsed(time_start) - print_info("{} took {:.2f} minutes to get this far".format( - basename(__file__), elapsed_minutes)) - sys.exit(1) + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to get this far." + ) -if __name__ == "__main__": - # Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - # Feed our arguments to the main function, and off we go! +if __name__ == "__main__": + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! main() diff --git a/dx_skel.py b/dx_skel.py deleted file mode 100755 index accc992..0000000 --- a/dx_skel.py +++ /dev/null @@ -1,315 +0,0 @@ -#!/usr/bin/env python -# Corey Brune - Feb 2017 -#Description: -# This is a skeleton script which has all of the common functionality. -# The developer will only need to add the necessary arguments and functions -# then make the function calls in main_workflow(). -#Requirements -#pip install docopt delphixpy.v1_8_0 - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. -"""Description -Usage: - dx_skel.py () - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_skel.py -h | --help | -v | --version -Description - -Examples: - - -Options: - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. 
- --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_skel.log] - -h --help Show this screen. - -v --version Show version. -""" - -VERSION = 'v.0.0.001' - -import sys -from os.path import basename -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import job - -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetReferences import find_obj_by_name -from lib.GetSession import GetSession - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. - This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - """ - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - if arguments['--vdb']: - #Get the database reference we are copying from the database name - database_obj = find_obj_by_name(dx_session_obj.server_session, - database, arguments['--vdb']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dx_session_obj.job_mode(single_thread): - while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): - if len(thingstodo) > 0: - if OPERATION: - method_call - - elif OPERATION: - method_call - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dx_session_obj.jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, - dx_session_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Replication operations: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the - # running jobs list. - del dx_session_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. 
- if len(dx_session_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - except (HttpError, RequestError, JobError, DlpxException) as e: - print_exception('ERROR: Could not complete replication ' - 'operation:{}'.format(e)) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - (arguments['--engine']))) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - #elapsed_minutes = round((time() - time_start)/60, +1) - #return elapsed_minutes - return round((time() - time_start)/60, +1) - - -def main(arguments): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. 
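The run_job() logic above picks its target engines from dxtools.conf: every engine for --all, a named entry for --engine, otherwise the entry flagged as the default. A rough standalone sketch of that lookup, assuming the JSON shape the older get_config() routines in this repo parse (a top-level "data" list keyed by hostname); the sample values, and the exact schema of the newer config/dxtools.conf, are assumptions for illustration only:

    # Illustrative sketch of how an engine is chosen from dxtools.conf.
    # Keys shown (hostname, ip_address, username, password, default, use_https)
    # are the ones these scripts reference; real files may carry more fields.
    import json

    SAMPLE_CONF = """
    {
      "data": [
        {"hostname": "engine1", "ip_address": "10.0.0.10", "username": "admin",
         "password": "delphix", "default": "true", "use_https": "true"}
      ]
    }
    """

    config = json.loads(SAMPLE_CONF)
    engines = {each["hostname"]: each for each in config["data"]}

    # No --engine/--all given: fall back to the entry flagged "default": "true"
    engine = next(e for e in engines.values() if e["default"] == "true")
    print(f"Executing against the default Delphix Engine: {engine['hostname']}")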
- run_job() - - elapsed_minutes = time_elapsed() - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - #Here we handle what we do when the unexpected happens - except DlpxException as e: - print_exception('script encountered an error while processing the' - 'config file:\n{}'.format(e)) - - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message:\n{}'.format(e)) - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('{} took {:.2f} minutes to get this far\n{}'.format( - basename(__file__), elapsed_minutes, e)) - sys.exit(3) - - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) diff --git a/dx_snapshot_db.py b/dx_snapshot_db.py index 19f2342..888ef20 100755 --- a/dx_snapshot_db.py +++ b/dx_snapshot_db.py @@ -1,619 +1,315 @@ -#!/usr/bin/env python -#Adam Bowen - Apr 2016 -#This script snapshots a vdb or dSource -#Corey Brune - March 2017 +#!/usr/bin/env python3 +# Adam Bowen - Apr 2016 +# This script snapshots a vdb or dSource +# Corey Brune - March 2017 # Updated to allow backup of Sybase -#requirements -#pip install docopt delphixpy +# requirements +# pip install docopt delphixpy -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. This thing is brilliant. +# The below doc follows the POSIX compliant standards and allows us to use +# This doc to also define our ARGUMENTS for the script. 
"""Snapshot dSources and VDB's Usage: - dx_snapshot_db.py (--group [--name ] | --all_dbs ) - [--engine | --all] - [--usebackup] [--bck_file ] [--debug] [--parallel ] - [--poll ][--create_bckup] - [--config ] [--logdir ] - dx_snapshot_db.py (--host [--group ] [--object_type ] - | --object_type [--group ] [--host ] ) - [-d | --engine | --all] - [--usebackup] [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] + dx_snapshot_db.py (--group |--name | --all_dbs ) + [--engine ] + [--usebackup --bck_file --parallel ] + [--poll --create_bckup --single_thread ] + [--config --logdir ] dx_snapshot_db.py -h | --help | -v | --version Snapshot a Delphix dSource or VDB Examples: - dx_snapshot_db.py --group "Sources" --object_type dsource --usebackup + dx_snapshot_db.py --group "Sources" --usebackup dx_snapshot_db.py --name "Employee Oracle 11G DB" - dx_snapshot_db.py --host LINUXSOURCE --parallel 2 --usebackup dx_snapshot_db.py --name dbw2 --usebackup --group Sources --create_bckup - dx_snapshot_db.py --name dbw2 --usebackup --group Sources --bck_file dbw2_full_20170317_001.dmp - dx_snapshot_db.py --host LINUXSOURCE --parallel 4 --usebackup --debug -d landsharkengine - + dx_snapshot_db.py --name dbw2 --usebackup --group Production \ + --bck_file dbw2_full_20170317_001.dmp Options: - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. + --engine Alt Identifier of Delphix engine in dxtools.conf. + [default: default] --all_dbs Run against all database objects - --bck_file Name of the specific ASE Sybase backup file(s). + --single_thread Run as a single thread. False if running multiple + threads. + [default: False] + --bck_file Name of the specific ASE Sybase backup file(s) + or backup uuid for MSSQL. + [default: None] --name Name of object in Delphix to execute against. --group Name of group in Delphix to execute against. - --host Name of environment in Delphix to execute against. - --object_type dsource or vdb. --usebackup Snapshot using "Most Recent backup". Available for MSSQL and ASE only. - --create_bckup Create and ingest a new Sybase backup - --debug Enable debug logging + [default: False] + --create_bckup Create and ingest a new Sybase backup or + copy-only MS SQL backup + [default: False] --parallel Limit number of jobs to maxjob --poll The number of seconds to wait between job polls [default: 10] --config The path to the dxtools.conf file - [default: ./dxtools.conf] + [default: ./config/dxtools.conf] --logdir The path to the logfile you want to use. - [default: ./dx_snapshot_db.log] + [default: ./logs/dx_snapshot_db.log] -h --help Show this screen. -v --version Show version. 
- """ -VERSION="v.0.0.100" - - -from docopt import docopt -import logging -from os.path import basename -import signal import sys import time -import traceback -import json - -from multiprocessing import Process -from time import sleep, time - -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError, JobError -from delphixpy.v1_6_0 import job_context -from delphixpy.v1_6_0.web import database, environment, group, job, source, user -from delphixpy.v1_6_0.web.vo import ASESpecificBackupSyncParameters, ASENewBackupSyncParameters, ASELatestBackupSyncParameters, MSSqlSyncParameters - -def ase_latest_backup_sync_parameters(): - obj = ASELatestBackupSyncParameters() - -def find_obj_by_name(engine, server, f_class, obj_name): - """ - Function to find objects by name and object class, and return object's reference as a string - You might use this function to find objects like groups. - """ - print_debug(engine["hostname"] + ": Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"") - obj_ref = '' - - all_objs = f_class.get_all(server) - for obj in all_objs: - if obj.name == obj_name: - print_debug(engine["hostname"] + ": Found a match " + str(obj.reference)) - return obj - -def find_all_databases_by_group_name(engine, server, group_name, exclude_js_container=False): - """ - Easy way to quickly find databases by group name - """ - - #First search groups for the name specified and return its reference - group_obj = find_obj_by_name(engine, server, group, group_name) - if group_obj: - databases=database.get_all(server, group=group_obj.reference, no_js_container_data_source=exclude_js_container) - return databases - -def find_database_by_name_and_group_name(engine, server, group_name, database_name): - - databases = find_all_databases_by_group_name(engine, server, group_name) - - for each in databases: - if each.name == database_name: - print_debug(engine["hostname"] + ": Found a match " + str(each.reference)) - return each - print_info("Unable to find \"" + database_name + "\" in " + group_name) - -def find_source_by_database(engine, server, database_obj): - #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. - source_obj = source.get_all(server, database=database_obj.reference) - #We'll just do a little sanity check here to ensure we only have a 1:1 result. - if len(source_obj) == 0: - print_error(engine["hostname"] + ": Did not find a source for " + database_obj.name + ". Exiting") - sys.exit(1) - elif len(source_obj) > 1: - print_error(engine["hostname"] + ": More than one source returned for " + database_obj.name + ". Exiting") - print_error(source_obj) - sys.exit(1) - return source_obj - -def get_config(config_file_path): - """ - This function reads in the dxtools.conf file - """ - #First test to see that the file is there and we can open it - try: - config_file = open(config_file_path).read() - except: - print_error("Was unable to open " + config_file_path + ". Please check the path and permissions, then try again.") - sys.exit(1) - #Now parse the file contents as json and turn them into a python dictionary, throw an error if it isn't proper json - try: - config = json.loads(config_file) - except: - print_error("Was unable to read " + config_file_path + " as json. 
Please check file in a json formatter and try again.") - sys.exit(1) - #Create a dictionary of engines (removing the data node from the dxtools.json, for easier parsing) - delphix_engines = {} - for each in config['data']: - delphix_engines[each['hostname']] = each - print_debug(delphix_engines) - return delphix_engines - -def logging_est(logfile_path): - """ - Establish Logging - """ - global debug - logging.basicConfig(filename=logfile_path,format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - print_info("Welcome to " + basename(__file__) + ", version " + VERSION) - global logger - debug = arguments['--debug'] - logger = logging.getLogger() - if debug == True: - logger.setLevel(10) - print_info("Debug Logging is enabled.") - -def job_mode(server): - """ - This function tells Delphix how to execute jobs, based on the single_thread variable at the beginning of the file - """ - #Synchronously (one at a time) - if single_thread == True: - job_m = job_context.sync(server) - print_debug("These jobs will be executed synchronously") - #Or asynchronously - else: - job_m = job_context.async(server) - print_debug("These jobs will be executed asynchronously") - return job_m - -def job_wait(): - """ - This job stops all work in the thread/process until jobs are completed. - """ - #Grab all the jos on the server (the last 25, be default) - all_jobs = job.get_all(server) - #For each job in the list, check to see if it is running (not ended) - for jobobj in all_jobs: - if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): - print_debug("Waiting for " + jobobj.reference + " (currently: " + jobobj.job_state+ ") to finish running against the container") - #If so, wait - job_context.wait(server,jobobj.reference) - -def on_exit(sig, func=None): - """ - This function helps us end cleanly and with exit codes - """ - print_info("Shutdown Command Received") - print_info("Shutting down " + basename(__file__)) - sys.exit(0) - -def print_debug(print_obj): - """ - Call this function with a log message to prefix the message with DEBUG - """ - try: - if debug == True: - print "DEBUG: " + str(print_obj) - logging.debug(str(print_obj)) - except: - pass - -def print_error(print_obj): - """ - Call this function with a log message to prefix the message with ERROR - """ - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - -def print_info(print_obj): - """ - Call this function with a log message to prefix the message with INFO - """ - print "INFO: " + str(print_obj) - logging.info(str(print_obj)) - -def print_warning(print_obj): - """ - Call this function with a log message to prefix the message with WARNING - """ - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - -def serversess(f_engine_address, f_engine_username, f_engine_password): - """ - Function to setup the session with the Delphix Engine - """ - server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN") - return server_session - -def set_exit_handler(func): - """ - This function helps us set the correct exit code - """ - signal.signal(signal.SIGTERM, func) - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... 
- t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - -@run_async -def main_workflow(engine): - """ - This function is where the main workflow resides. - Use the @run_async decorator to run this function asynchronously. - This allows us to run against multiple Delphix Engine simultaneously - """ - - #Pull out the values from the dictionary for this engine - engine_address = engine["ip_address"] - engine_username = engine["username"] - engine_password = engine["password"] - #Establish these variables as empty for use later - databases = [] - environment_obj = None - source_objs = None - jobs = {} - - - #Setup the connection to the Delphix Engine - server = serversess(engine_address, engine_username, engine_password) - - #If an environment/server was specified - if host_name: - print_debug(engine["hostname"] + ": Getting environment for " + host_name) - #Get the environment object by the hostname - environment_obj = find_obj_by_name(engine, server, environment, host_name) - if environment_obj != None: - #Get all the sources running on the server - env_source_objs = source.get_all(server, environment=environment_obj.reference) - #If the server doesn't have any objects, exit. - if env_source_objs == None: - print_error(host_name + "does not have any objects. Exiting") - sys.exit(1) - #If we are only filtering by the server, then put those objects in the main list for processing - if not(arguments['--group'] and database_name): - source_objs = env_source_objs - all_dbs = database.get_all(server, no_js_container_data_source=False) - databases = [] - for source_obj in source_objs: - if source_obj.staging == False and source_obj.virtual == True: - database_obj = database.get(server, source_obj.container) - if database_obj in all_dbs: - databases.append(database_obj) - else: - print_error(engine["hostname"] + ":No environment found for " + host_name + ". Exiting") - sys.exit(1) - #If we specified a specific database by name.... - if arguments['--name']: - #Get the database object from the name - database_obj = find_database_by_name_and_group_name(engine, server, arguments['--group'], arguments['--name']) - if database_obj: - databases.append(database_obj) - #Else if we specified a group to filter by.... - elif arguments['--group']: - print_debug(engine["hostname"] + ":Getting databases in group " + arguments['--group']) - #Get all the database objects in a group. - databases = find_all_databases_by_group_name(engine, server, arguments['--group']) - #Else, if we said all vdbs ... - elif arguments['--all_dbs'] and not arguments['--host'] : - #Grab all databases - databases = database.get_all(server, no_js_container_data_source=False) - elif arguments['--object_type'] and not arguments['--host'] : - databases = database.get_all(server) - if not databases or len(databases) == 0: - print_error("No databases found with the criterion specified") - return - #reset the running job count before we begin - i = 0 - with job_mode(server): - #While there are still running jobs or databases still to process.... 
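The loop that follows interleaves kicking off syncs with polling Delphix job state until every job reaches a terminal state (CANCELED, COMPLETED, or FAILED). A stripped-down, standalone sketch of that polling pattern, with a FakeJob class standing in for delphixpy's job.get() and the session's job dictionary:

    # Standalone sketch of the poll-until-terminal pattern used in main_workflow.
    # FakeJob and poll_interval are illustrative stand-ins, not delphixpy objects.
    import random
    from time import sleep

    TERMINAL = {"CANCELED", "COMPLETED", "FAILED"}

    class FakeJob:
        def __init__(self, ref):
            self.reference = ref
            self.job_state = "RUNNING"

        def refresh(self):
            # Stand-in for job.get(server, reference); real jobs progress on their own.
            if random.random() < 0.5:
                self.job_state = "COMPLETED"

    jobs = {f"JOB-{i}": FakeJob(f"JOB-{i}") for i in range(3)}
    poll_interval = 1  # seconds, like the --poll option

    while jobs:
        for ref in list(jobs):
            jobs[ref].refresh()
            if jobs[ref].job_state in TERMINAL:
                del jobs[ref]  # drop finished jobs from the tracking dict
        print(f"{len(jobs)} jobs running.")
        if jobs:
            sleep(poll_interval)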
- while (len(jobs) > 0 or len(databases) > 0): - #While there are databases still to process and we are still under - #the max simultaneous jobs threshold (if specified) - while len(databases) > 0 and (arguments['--parallel'] == None or i < int(arguments['--parallel'])): - #Give us the next database in the list, and remove it from the list - database_obj = databases.pop() - #Get the source of the database. - #The source tells us if the database is enabled/disables, virtual, vdb/dSource, or is a staging database. - source_obj = find_source_by_database(engine, server, database_obj) - #If we applied the environment/server filter AND group filter, find the intersecting matches - if environment_obj != None and (arguments['--group']): - match = False - for env_source_obj in env_source_objs: - if source_obj[0].reference in env_source_obj.reference: - match = True - break - if match == False: - print_error(engine["hostname"] + ": " + database_obj.name + " does not exist on " + host_name + ". Exiting") - return - #Snapshot the database - snapshot_job = snapshot_database(engine, server, jobs, source_obj[0], database_obj, arguments['--object_type']) - #If snapshot_job has any value, then we know that a job was initiated. - if snapshot_job: - #increment the running job count - i += 1 - #Check to see if we are running at max parallel processes, and report if so. - if ( arguments['--parallel'] != None and i >= int(arguments['--parallel'])): - print_info(engine["hostname"] + ": Max jobs reached (" + str(i) + ")") - #reset the running jobs counter, as we are about to update the count from the jobs report. - i = update_jobs_dictionary(engine, server, jobs) - print_info(engine["hostname"] + ": " + str(i) + " jobs running. " + str(len(databases)) + " jobs waiting to run") - #If we have running jobs, pause before repeating the checks. - if len(jobs) > 0: - sleep(float(arguments['--poll'])) - -def run_job(engine): - """ - This function runs the main_workflow aynchronously against all the servers specified - """ - #Create an empty list to store threads we create. - threads = [] - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - #For each server in the dxtools.conf... - for delphix_engine in dxtools_objects: - engine = dxtools_objects[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - else: - #Else if the --engine argument was given, test to see if the engine exists in dxtools.conf - if arguments['--engine']: - try: - engine = dxtools_objects[arguments['--engine']] - print_info("Executing against Delphix Engine: " + arguments['--engine']) - except: - print_error("Delphix Engine \"" + arguments['--engine'] + "\" cannot be found in " + config_file_path) - print_error("Please check your value and try again. Exiting") - sys.exit(1) - #Else if the -d argument was given, test to see if the engine exists in dxtools.conf - elif arguments['-d']: - try: - engine = dxtools_objects[arguments['-d']] - print_info("Executing against Delphix Engine: " + arguments['-d']) - except: - print_error("Delphix Engine \"" + arguments['-d'] + "\" cannot be found in " + config_file_path) - print_error("Please check your value and try again. 
Exiting") - sys.exit(1) - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dxtools_objects: - if dxtools_objects[delphix_engine]['default'] == 'true': - engine = dxtools_objects[delphix_engine] - print_info("Executing against the default Delphix Engine in the dxtools.conf: " + dxtools_objects[delphix_engine]['hostname']) - break - if engine == None: - print_error("No default engine found. Exiting") - sys.exit(1) - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete before moving on - each.join() +from os.path import basename -def snapshot_database(engine, server, jobs, source_obj, container_obj, obj_type=None): - """ - This function - FYI - Snapshot is also called sync - """ - #Sanity check to make sure our source object has a reference - if source_obj.reference != None : - #If we specified the --object_type flag, ensure this source is a match. Skip, if not. - if obj_type != None and ((obj_type.lower() == "vdb" and source_obj.virtual != True ) or (obj_type.lower() == "dsource" and source_obj.virtual != False )): - print_warning(engine["hostname"] + ": " + container_obj.name + " is not a " + obj_type.lower() + ". Skipping sync") - #Ensure this source is not a staging database. We can't act upon those. - elif source_obj.staging == True: - print_warning(engine["hostname"] + ": " + container_obj.name + " is a staging database. Skipping.") - #Ensure the source is enabled. We can't snapshot disabled databases. - elif source_obj.runtime.enabled == "ENABLED" : - print_info(engine["hostname"] + ": Syncing " + container_obj.name ) - print_debug(engine["hostname"] + ": Type: " + source_obj.type ) - print_debug(engine["hostname"] + ": " +source_obj.type) - #If the database is a dSource and a MSSQL type, we need to tell Delphix how we want to sync the database... - #Delphix will just ignore the extra parameters if it is a VDB, so we will omit any extra code to check - if (source_obj.type == "MSSqlLinkedSource"): - sync_params = MSSqlSyncParameters() - #From last backup? - if usebackup == True: - sync_params.load_from_backup = True - print_info(engine["hostname"] + ": MSSQL database. Creating snapshot of " + container_obj.name + " from Latest Full backup.") - #Or take a new backup? - else: - sync_params.load_from_backup = False - print_info(engine["hostname"] + ": MSSQL database. Creating snapshot of " + container_obj.name + " from New Full backup.") - print_debug(engine["hostname"] + ": " +str(sync_params)) - #Sync it - database.sync(server, container_obj.reference, sync_params) - #Else if the database is a dSource and a ASE type, we need also to tell Delphix how we want to sync the database... 
- #Delphix will just ignore the extra parameters if it is a VDB, so we will omit any extra code to check - elif (source_obj.type == "ASELinkedSource"): - if usebackup == True: - if arguments['--bck_file']: - sync_params = ASESpecificBackupSyncParameters() - sync_params.backup_files = (arguments['--bck_file']).split(' ') - elif arguments['--create_bckup']: - sync_params = ASENewBackupSyncParameters() +import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import source +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async + +VERSION = "v.0.3.002" + + +def snapshot_database( + dlpx_obj, + db_name=None, + all_or_group_dbs=None, + use_backup=False, + backup_file=None, + create_bckup=False, +): + """ + Create a snapshot (sync) of a dSource or VDB + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param db_name: Name of the VDB or dSource to snapshot + :type db_name: str or None + :param all_or_group_dbs: List containing all DB on or all DBs in a group + :type all_or_group_dbs: list or None + :param use_backup: Snapshot using "Most recent backup" + :type use_backup: bool + :param backup_file: File to use for this snapshot + :type backup_file: str or None + :param create_bckup: Create a backup to use for the snapshot + :type create_bckup: bool + """ + sync_params = None + if isinstance(db_name, str): + all_or_group_dbs = [db_name] + + for db_sync in all_or_group_dbs: + try: + db_source_info = get_references.find_obj_by_name( + dlpx_obj.server_session, source, db_sync + ) + container_obj_ref = get_references.find_obj_by_name( + dlpx_obj.server_session, database, db_sync + ).reference + except dlpx_exceptions.DlpxObjectNotFound as err: + raise dlpx_exceptions.DlpxException from err + if db_source_info.staging: + raise dlpx_exceptions.DlpxException( + f"{db_sync} is a staging " f"database. Cannot Sync.\n" + ) + if db_source_info.runtime.enabled != "ENABLED": + raise dlpx_exceptions.DlpxException( + f"{db_sync} is not enabled " f"database. Cannot Sync.\n" + ) + if db_source_info.runtime.enabled == "ENABLED": + # If the database is a dSource and a MSSQL type, we need to tell + # Delphix how we want to sync the database. + # Delphix will just ignore the extra parameters if it is a VDB, + # so we will omit any extra code to check + if db_source_info.type == "MSSqlLinkedSource": + if create_bckup is True: + sync_params = vo.MSSqlNewCopyOnlyFullBackupSyncParameters() + sync_params.compression_enabled = False + elif use_backup is True: + if backup_file != None: + sync_params = vo.MSSqlExistingSpecificBackupSyncParameters() + sync_params.backup_uuid = backup_file + else: + sync_params = vo.MSSqlExistingMostRecentBackupSyncParameters() + # Else if the database is a dSource and a ASE type, we need also to + # tell Delphix how we want to sync the database... + # Delphix will just ignore the extra parameters if it is a VDB, so + # we will omit any extra code to check + elif db_source_info.type == "ASELinkedSource": + if use_backup is True: + if backup_file: + sync_params = vo.ASESpecificBackupSyncParameters() + sync_params.backup_files = backup_file.split(" ") + elif create_bckup: + sync_params = vo.ASENewBackupSyncParameters() else: - sync_params = ASELatestBackupSyncParameters() - print_info(engine["hostname"] + ": ASE database. 
Creating snapshot of " + container_obj.name + " from Latest Full backup.") - #Or take a new backup? + sync_params = vo.ASELatestBackupSyncParameters() else: - sync_params = ASENewBackupSyncParameters() - print_info(engine["hostname"] + ": ASE database. Creating snapshot of " + container_obj.name + " from Full backup.") - print_debug(engine["hostname"] + ": " +str(sync_params)) - #Sync it - database.sync(server, container_obj.reference, sync_params) - #If it isn't MSSQL or ASE, Delphix can just go ahead and sync the database + sync_params = vo.ASENewBackupSyncParameters() + if sync_params: + database.sync(dlpx_obj.server_session, container_obj_ref, sync_params) else: - #Sync it - database.sync(server, container_obj.reference) - #Add the job into the jobs dictionary so we can track its progress - jobs[container_obj] = server.last_job - #return the job object to the calling statement so that we can tell if a job was created or not (will return None, if no job) - return server.last_job - #Don't do anything if the database is disabled - else: - print_warning(engine["hostname"] + ": " + container_obj.name + " is not enabled. Skipping sync") + database.sync(dlpx_obj.server_session, container_obj_ref) + # Add the job into the jobs dictionary so we can track its progress + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes -def update_jobs_dictionary(engine, server, jobs): +@run_async +def main_workflow(engine, dlpx_obj, single_thread): """ - This function checks each job in the dictionary and updates its status or removes it if the job is complete. - Return the number of jobs still running. + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool """ - #Establish the running jobs counter, as we are about to update the count from the jobs report. - i = 0 - #get all the jobs, then inspect them - for j in jobs.keys(): - job_obj = job.get(server, jobs[j]) - print_debug(engine["hostname"] + ": " + str(job_obj)) - print_info(engine["hostname"] + ": " + j.name + ": " + job_obj.job_state) - - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from the running jobs list. - del jobs[j] - else: - #If the job is in a running state, increment the running job count. - i += 1 - return i - -def main(argv): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global host_name - global database_name - global config_file_path - global dxtools_objects - - - try: - #Declare globals that will be used throughout the script. 
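The branching above, in both the old and the rewritten snapshot code, reduces to a small decision table: the linked-source type plus the --usebackup, --bck_file, and --create_bckup flags determine which SyncParameters object is handed to database.sync(). A rough sketch of the rewritten logic as a pure function, returning the name of the delphixpy vo class the script would instantiate; pick_sync_params is a hypothetical helper added here for illustration, and None means database.sync() would be called with no extra parameters:

    # Mirrors the sync-parameter branching in the rewritten snapshot_database().
    # Returns the vo class name as a string; illustrative only.
    def pick_sync_params(source_type, use_backup=False, backup_file=None,
                         create_bckup=False):
        if source_type == "MSSqlLinkedSource":
            if create_bckup:
                return "MSSqlNewCopyOnlyFullBackupSyncParameters"
            if use_backup:
                return ("MSSqlExistingSpecificBackupSyncParameters" if backup_file
                        else "MSSqlExistingMostRecentBackupSyncParameters")
            return None
        if source_type == "ASELinkedSource":
            if use_backup:
                if backup_file:
                    return "ASESpecificBackupSyncParameters"
                if create_bckup:
                    return "ASENewBackupSyncParameters"
                return "ASELatestBackupSyncParameters"
            return "ASENewBackupSyncParameters"
        return None  # VDBs and other source types: plain database.sync()

    print(pick_sync_params("ASELinkedSource", use_backup=True,
                           backup_file="dbw2_full_20170317_001.dmp"))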
- logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - engine = None - single_thread = False - usebackup = arguments['--usebackup'] - database_name = arguments['--name'] - host_name = arguments['--host'] - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dxtools_objects = get_config(config_file_path) - - #This is the function that will handle processing main_workflow for all the servers. - run_job(engine) - - elapsed_minutes = time_elapsed() - print_info("script took " + str(elapsed_minutes) + " minutes to get this far.") - + try: + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: {basename(__file__)} encountered an error authenticating" + f' to {engine["hostname"]} {ARGUMENTS["--target"]}:\n{err}' + ) + try: + with dlpx_obj.job_mode(single_thread): + if ARGUMENTS["--name"] is not None: + snapshot_database( + dlpx_obj, + ARGUMENTS["--name"], + None, + ARGUMENTS["--usebackup"], + ARGUMENTS["--bck_file"], + ARGUMENTS["--create_bckup"], + ) + if ARGUMENTS["--group"]: + databases = get_references.find_all_databases_by_group( + dlpx_obj.server_session, ARGUMENTS["--group"] + ) + database_lst = [] + for db_name in databases: + database_lst.append(db_name.name) + snapshot_database( + dlpx_obj, + None, + database_lst, + ARGUMENTS["--usebackup"], + ARGUMENTS["--bck_file"], + ARGUMENTS["--create_bckup"], + ) + elif ARGUMENTS["--all_dbs"]: + # Grab all databases + databases = database.get_all( + dlpx_obj.server_session, no_js_data_source=False + ) + database_lst = [] + for db_name in databases: + database_lst.append(db_name.name) + snapshot_database( + dlpx_obj, + None, + database_lst, + ARGUMENTS["--usebackup"], + ARGUMENTS["--bck_file"], + ARGUMENTS["--create_bckup"], + ) + run_job.track_running_jobs(engine, dlpx_obj) + except ( + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f'Error in {basename(__file__)}: {engine["hostname"]}\n{err}' + ) + + +def main(): + """ + main function - creates session and runs jobs + """ + time_start = time.time() + try: + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] + dx_session_obj.get_config(config_file_path) + # This is the function that will handle processing main_workflow for + # all the servers. + for each in run_job.run_job_mt( + main_workflow, dx_session_obj, engine, single_thread + ): + # join them back together so that we wait for all threads to + # complete + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"script took {elapsed_minutes} minutes to " f"get this far." + ) + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call.
+ dx_logging.print_exception( + f"ERROR: Please check the ERROR message " f"below:\n {err.error}" + ) + sys.exit(2) - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) sys.exit(2) - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that we have actionable data - """ - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") + + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." + f"{basename(__file__)} took {elapsed_minutes} minutes to get " + f"this far" + ) sys.exit(3) + except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") - except: - """ - Everything else gets caught here - """ - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info(basename(__file__) + " took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(1) + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to get this far." + ) + if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #I added this below condition to account for my --name | or AT LEAST ONE OF --group --host --object_type - #I couldn't quite sort it out with docopt. Maybe I'm just dense today. - #Anyway, if none of the four options are given, print the __doc__ and exit. - if not(arguments['--name']) and not(arguments['--group']) and not(arguments['--host']) and not(arguments['--object_type']) and not(arguments['--all_dbs']): - print(__doc__) - sys.exit() - #Feed our arguments to the main function, and off we go! - main(arguments) \ No newline at end of file + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! 
+ main() diff --git a/dx_update_env.py b/dx_update_env.py deleted file mode 100755 index 2eb972a..0000000 --- a/dx_update_env.py +++ /dev/null @@ -1,326 +0,0 @@ -#!/usr/bin/env python -# Corey Brune - Feb 2017 -#Description: -# Update Environment -# -#Requirements -#pip install docopt delphixpy.v1_8_0 - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. -"""Description -Usage: - dx_update_env.py (--pw --env_name ) - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_update_env.py -h | --help | -v | --version -Description - -Examples: - - -Options: - --pw Password - --env_name Name of the environment - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_operations_vdb.log] - -h --help Show this screen. - -v --version Show version. -""" - -VERSION = 'v.0.0.002' - -import sys -from os.path import basename -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import environment -from delphixpy.v1_8_0.web.vo import ASEHostEnvironmentParameters -from delphixpy.v1_8_0.web.vo import UnixHostEnvironment - - -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetReferences import find_obj_by_name -from lib.GetSession import GetSession - -def update_ase_db_pw(): - - env_obj = UnixHostEnvironment() - env_obj.ase_host_environment_parameters = ASEHostEnvironmentParameters() - env_obj.ase_host_environment_parameters.credentials = {'type': - 'PasswordCredential', - 'password': arguments['--pw']} - - try: - environment.update(dx_session_obj.server_session, find_obj_by_name( - dx_session_obj.server_session, environment, - arguments['--env_name'], env_obj).reference, env_obj) - - except (HttpError, RequestError) as e: - print_exception('Could not update ASE DB Password:\n{}'.format(e)) - sys.exit(1) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - """ - jobs = {} - - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine %s encountered an error while' - '%s:\n%s\n' % (engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - #reset the running job count before we begin - i = 0 - with dx_session_obj.job_mode(single_thread): - while (len(jobs) > 0 or len(thingstodo)> 0): - if len(thingstodo)> 0: - if arguments['--pw']: - update_ase_db_pw() - - #elif OPERATION: - # method_call - - thingstodo.pop() - - #get all the jobs, then inspect them - i = 0 - for j in jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, jobs[j]) - print_debug(job_obj) - print_info(engine["hostname"] + ": VDB Operations: " + - job_obj.job_state) - - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from the - # running jobs list. - del jobs[j] - else: - #If the job is in a running state, increment the running - # job count. - i += 1 - - print_info(engine["hostname"] + ": " + str(i) + " jobs running. ") - #If we have running jobs, pause before repeating the checks. - if len(jobs) > 0: - sleep(float(arguments['--poll'])) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. - threads = [] - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n%s' % (e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: %s\n' % - (arguments['--engine'])) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine %s cannot be ' - 'found in %s. Please check your value ' - 'and try again. Exiting.\n%s\n' % ( - arguments['--engine'], config_file_path, e)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: %s' % ( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. 
- Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - - -def main(argv): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global database_name - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. - run_job() - - elapsed_minutes = time_elapsed() - print_info("script took " + str(elapsed_minutes) + - " minutes to get this far.") - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message below') - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('%s took %s minutes to get this far\n' % - (basename(__file__), str(elapsed_minutes))) - sys.exit(3) - - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far\n' % - (basename(__file__), str(elapsed_minutes))) - - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('%s took %s minutes to get this far\n' % - (basename(__file__), str(elapsed_minutes))) - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! - main(arguments) diff --git a/dx_users.py b/dx_users.py deleted file mode 100755 index 187f408..0000000 --- a/dx_users.py +++ /dev/null @@ -1,440 +0,0 @@ -#!/usr/bin/env python -# Adam Bowen - Aug 2017 -#Description: -# This script will allow you to easily manage users in Delphix -# This script currently only supports Native authentication -# -#Requirements -#pip install docopt delphixpy.v1_8_0 - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. 
-"""Description -Usage: - dx_users.py (--user_name [(--add --password --email [--jsonly]) |--delete]) - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_users.py --update --user_name [ --password ] [--email ] [ --delete ] [--jsonly] - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_users.py (--list) - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - dx_users.py -h | --help | -v | --version -Description - -Examples: - dx_users.py --add --user_name dev --password delphix --email "test@something.com" --jsonly - dx_users.py --debug --config delphixpy.v1_8_0-examples/dxtools_1.conf --update --user_name dev --password not_delphix --email "test@somethingelse.com" - dx_users.py --delete --user_name dev - dx_users.py --list - -Options: - --user_name The name of the user - --password The password of the user to be created/updated - --email The email addres of the user to be created/updated - --jsonly Designate the user as a Jet Stream Only User - --add Add the identified user - --update Update the identified user - --delete Delete the identified user - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_skel.log] - -h --help Show this screen. - -v --version Show version. -""" - -VERSION = 'v.0.0.004' - -import sys -from os.path import basename -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import authorization -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import user -from delphixpy.v1_8_0.web import role -from delphixpy.v1_8_0.web.vo import Authorization -from delphixpy.v1_8_0.web.vo import User -from delphixpy.v1_8_0.web.vo import PasswordCredential -from delphixpy.v1_8_0.web.vo import CredentialUpdateParameters - -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_all_objects -from lib.GetSession import GetSession - -def add_user(user_name, user_password, user_email, jsonly=None): - """ - This function adds the user - """ - user_obj = User() - user_obj.name = user_name - user_obj.email_address = user_email - user_obj.credential = PasswordCredential() - user_obj.credential.password = user_password - - try: - user.create(dx_session_obj.server_session,user_obj) - print('Attempting to create {}'.format(user_name)) - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Creating the user {} ' - 'encountered an error:\n{}'.format(user_name, e)) - sys.exit(1) - - js_only(user_name, jsonly) - -def js_only(user_name, jsonly=None): - """ - Switch the user to/from a jsonly user - """ - user_obj = find_obj_by_name(dx_session_obj.server_session, - user, user_name) - role_obj = find_obj_by_name(dx_session_obj.server_session, - role, "Jet Stream User") - - if jsonly: - authorization_obj = Authorization() - 
authorization_obj.role = role_obj.reference - authorization_obj.target = user_obj.reference - authorization_obj.user = user_obj.reference - - authorization.create(dx_session_obj.server_session, authorization_obj) - else: - - auth_name = "(" + user_obj.reference + ", " + role_obj.reference + ", " + user_obj.reference + ")" - authorization.delete(dx_session_obj.server_session,find_obj_by_name(dx_session_obj.server_session, - authorization, auth_name).reference) - -def update_user(user_name, user_password=None, user_email=None, jsonly=None): - """ - This function updates the user - """ - - if user_email: - updated_user_obj = User() - updated_user_obj.email_address = user_email - - try: - user.update(dx_session_obj.server_session,find_obj_by_name(dx_session_obj.server_session, - user, user_name).reference,updated_user_obj) - print('Attempting to update {}'.format(user_name)) - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Updating the user {} ' - 'encountered an error:\n{}'.format(user_name, e)) - sys.exit(1) - - if user_password: - new_password_obj = CredentialUpdateParameters() - new_password_obj.new_credential = PasswordCredential() - new_password_obj.new_credential.password = user_password - - try: - user.update_credential(dx_session_obj.server_session,find_obj_by_name(dx_session_obj.server_session, - user, user_name).reference,new_password_obj) - print('Attempting to update {} password'.format(user_name)) - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Updating the user {} password ' - 'encountered an error:\n{}'.format(user_name, e)) - sys.exit(1) - - js_only(user_name, jsonly) - -def delete_user(user_name): - """ - This function adds the user - """ - user_obj = find_obj_by_name(dx_session_obj.server_session, - user, user_name) - - - try: - user.delete(dx_session_obj.server_session,user_obj.reference) - print('Attempting to delete {}'.format(user_name)) - except (DlpxException, RequestError) as e: - print_exception('\nERROR: Deleting the user {} ' - 'encountered an error:\n{}'.format(user_name, e)) - sys.exit(1) - -def list_users(): - """ - This function lists all users - """ - user_list = find_all_objects(dx_session_obj.server_session, user) - - for user_obj in user_list: - print('User: {}'.format(user_obj.name)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - """ - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dx_session_obj.job_mode(single_thread): - while (len(dx_session_obj.jobs) > 0 or len(thingstodo)> 0): - if len(thingstodo) > 0: - if arguments['--add'] : - add_user(arguments['--user_name'], arguments['--password'], arguments['--email'], arguments['--jsonly']) - elif arguments['--update'] : - update_user(arguments['--user_name'], arguments['--password'], arguments['--email'], arguments['--jsonly']) - elif arguments['--delete']: - delete_user(arguments['--user_name']) - elif arguments['--list']: - list_users() - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dx_session_obj.jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, - dx_session_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: User: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the - # running jobs list. - del dx_session_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dx_session_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - except (HttpError, RequestError, JobError, DlpxException) as e: - print_exception('ERROR: Could not complete user ' - 'operation: {}'.format(e)) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - (arguments['--engine']))) - - except (DlpxException, RequestError, KeyError) as e: - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. 
Exiting.\n'.format( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - - break - - if engine == None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - #elapsed_minutes = round((time() - time_start)/60, +1) - #return elapsed_minutes - return round((time() - time_start)/60, +1) - - -def main(arguments): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. 
- run_job() - - elapsed_minutes = time_elapsed() - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - #Here we handle what we do when the unexpected happens - except DlpxException as e: - print_exception('script encountered an error while processing the' - 'config file:\n{}'.format(e)) - - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message:\n{}'.format(e)) - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('{} took {:.2f} minutes to get this far\n{}'.format( - basename(__file__), elapsed_minutes, e)) - sys.exit(3) - - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! 
- main(arguments) diff --git a/dxtools.conf b/dxtools.conf deleted file mode 100755 index 68e331d..0000000 --- a/dxtools.conf +++ /dev/null @@ -1,13 +0,0 @@ -{ - "data":[ - { - "hostname":"landsharkengine", - "ip_address":"172.16.169.146", - "username":"delphix_admin", - "password":"delphix", - "port":"80", - "default":"true", - "encrypted":"false" - } - ] -} diff --git a/engine_network_assignment b/engine_network_assignment deleted file mode 100644 index aeb9b96..0000000 --- a/engine_network_assignment +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/expect -f -#Adam Bowen - 2015 -#engine_network_assignment -#Many thanks for the examples given at https://www.pantz.org/software/expect/expect_examples_and_tips.html -##################################################################### -set version 2.3.005 -set DESCRIPTION "This script sets the static IP address and dns information for the Delphix Engine" -set timeout 20 -set BOLD "\033\[01m" -set NORM "\033\[0m" -set enginename [lindex $argv 0] -set engineip [lindex $argv 1] -set username [lindex $argv 2] -set password [lindex $argv 3] -set network [lindex $argv 4] -set gateway [lindex $argv 5] -set dnsserver [lindex $argv 6] -set reboot [lindex $argv 7] -set change false - -#log_user 0 - -if {[llength $argv] < 7} { - send_user "Usage: $argv0 enginename engineip username \'password\' network_cidr gateway dnsserver reboot(Y or N)\n" - exit 1 -} - -send_user "\n#####\n#${BOLD}Welcome to $argv0, v.$version${NORM}" -send_user "\n#$DESCRIPTION" -send_user "\n#This script will update the network settings to the following:" -send_user "\n#dhcp=false" -send_user "\n#ip address = $engineip/$network" -send_user "\n#default gateway = $gateway" -send_user "\n#dnsserver = $dnsserver" -send_user "\n#####\n" - -send_user "\nConnecting to $engineip\n" -#spawn ssh -q -o ConnectTimeout=30 -o "StrictHostKeyChecking no" $username@$engineip -spawn ssh -o "StrictHostKeyChecking no" $username@$engineip - -expect { - timeout { send_user "\nFailed to get password prompt\n"; exit 1 } - eof { send_user "\nSSH failure for $engineip\n"; exit 1 } - "Password: " { - send "$password\r" - expect { - timeout { send_user "\nLogin failed. Password incorrect.\ngot '$expect_out(buffer)'\n"; exit 1} - "*network setup update*" {} - "*>*" { - send "/network/setup/update\r" - expect { - default { send_user "\nFailed to get the network update prompt. 
Ensure you are logged in as a user with sysadmin privileges\n"; exit 1} - "*network setup update *> " - } - } - } - } - "${enginename}> " -} - -send_user "\nSuccessfully logged into the Delphix Engine\n" -#send "/version 1.6.1\r" -#send "/network/setup/update\r" - - - -send_user "\nUpdating network settings...\n" - -send "/network/setup/ls\r" - -expect -re {Properties.*defaultRoute: (.*)\r\n.*dhcp: (.*)\r\n.*dnsDomain: (.*)\r\n.*dnsServers: (.*)\r\n.*hostname: (.*)\r\n.*primaryAddress: ([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/[0-9]{2}).*\r\n.*} { - set defaultRoute $expect_out(1,string) - set dhcp $expect_out(2,string) - set dnsDomain $expect_out(3,string) - set dnsServers $expect_out(4,string) - set hostname $expect_out(5,string) - set primaryAddress $expect_out(6,string) -} - -puts "\n" -puts "defaultRoute: $defaultRoute" -puts "dhcp: $dhcp" -puts "dnsDomain: $dnsDomain" -puts "dnsServers: $dnsServers" -puts "hostname: $hostname" -puts "primaryAddress: $primaryAddress" - -if {$dhcp=="true"} { - send_user "\nTurning off dhcp\n" - send "set dhcp=false\r" - expect "$hostname network setup update *> " - set change true -} else { - send_user "\ndhcp already off" -} - -if {$hostname!=$enginename} { - send_user "\nSetting hostname\n" - send "set hostname=$enginename\r" - expect "$hostname network setup update *> " - set change true -} else { - send_user "\nhostname already set to correct value" -} - -if {$defaultRoute!=$gateway} { - send_user "\nSetting defaultRoute\n" - send "set defaultRoute=$gateway\r" - expect "$hostname network setup update *> " - set change true -} else { - send_user "\ndefaultRoute already set to correct value" -} - -if {$dnsDomain!="delphix.local"} { - send_user "\nSetting dnsDomain\n" - send "set dnsDomain=delphix.local\r" - expect "$hostname network setup update *> " - set change true -} else { - send_user "\ndnsDomain already set to correct value" -} - -if {$dnsServers!=$dnsserver} { - send_user "\nSetting dnsServers\n" - send "set dnsServers=$dnsserver\r" - expect "$hostname network setup update *> " - set change true -} else { - send_user "\ndnsServers already set to correct value" -} - -if {"$primaryAddress"!="$engineip/$network"} { - #puts "" - #puts "HERE_ $primaryAddress _HERE" - #puts "HERE_ $engineip/$network _HERE" - #exit 1 - send_user "\nSetting primaryAddress\n" - send "set primaryAddress=$engineip/$network\r" - expect "$hostname network setup update *> " - set change true -} else { - send_user "\nprimaryAddress already set to correct value" -} - -if {$change==true} { - send_user "\nCommitting changes\n" - send "commit\r" -} else { - send_user "\nNothing to change\n" - send "discard\r" -} - - -expect "${enginename}> " - -send "/network/setup/ls\r" - -expect -re "(Properties)(.*defaultRoute.*)(Operations.*)" - -set output $expect_out(2,string) - -puts "$BOLD New Engine Network Settings\n$output $NORM \n" - -if {$reboot=="Y"} { - - send_user "\nRebooting Engine...\n" - - send "/system/reboot\r" - - expect "$enginename system reboot *>" - - send "commit\r" - -expect "Rebooting system. The current session will be terminated." - -} else { - - send "exit\r" -} - -send_user "$argv0 is finished\n\n" diff --git a/engine_network_assignment.py b/engine_network_assignment.py deleted file mode 100755 index 370f919..0000000 --- a/engine_network_assignment.py +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/env python -''' -Adam Bowen - Jan 2016 -This script configures the Delphix Engine networking. 
-''' -VERSION="v.2.3.002" - -import getopt -import logging -from os.path import basename -import signal -import sys -import time -import traceback - -import errno -from socket import error as socket_error - -from delphixpy.v1_8_0.delphix_engine import DelphixEngine -from delphixpy.v1_8_0.exceptions import HttpError, JobError -from delphixpy.v1_8_0.web import network, system, user, service -from delphixpy.v1_8_0.web.vo import PasswordCredential, User, NetworkInterface, \ - InterfaceAddress, DNSConfig, SystemInfo, NetworkRoute - -def system_serversess(f_engine_address, f_engine_username, f_engine_password): - ''' - Function to grab the server session - ''' - server_session= DelphixEngine(f_engine_address, f_engine_username, \ - f_engine_password, "SYSTEM") - return server_session - -def help(): - print( basename(__file__)+ ' [-e ] [-o - The IP to use to connect to the Delphix ' - 'Engine. \nEngine must be up, unconfigured, and console screen must be ' - 'green') - print('-p - will set the sysadmin user to this ' - 'password') - print('-n - will set the Delphix Engine to this ' - 'IP address \n(i.e. 10.0.1.10/24)') - print('-g - will set the default gateway to point to ' - 'this \nIP address') - print('-d - comma delimited string of dns servers to use \n' - '(i.e. \"4.2.2.2,192.168.2.1\"")') - print("-v - Print version information and exit") - sys.exit(2) - -def logging_est(): - ''' - Establish Logging - ''' - global debug - logging.basicConfig(filename='landshark_setup.log',\ - format='%(levelname)s:\%(asctime)s:%(message)s', level=logging.INFO, \ - datefmt='%Y-%m-%d %H:%M:%S') - print_info("Welcome to " + basename(__file__) + ", version " + VERSION) - global logger - debug = True - logger = logging.getLogger() - logger.setLevel(10) - print_info("Debug Logging is enabled.") - -def on_exit(sig, func=None): - print_info("Shutdown Command Received") - print_info("Shutting down prime_setup.py") - sys.exit(0) - -def print_debug(print_obj): - ''' - DEBUG Log-level - ''' - if debug == True: - print "DEBUG: " + str(print_obj) - logging.debug(str(print_obj)) - -def print_error(print_obj): - ''' - ERROR Log-level - ''' - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - -def print_info(print_obj): - ''' - INFO Log-level - ''' - print "INFO: " + str(print_obj) - logging.info(str(print_obj)) - -def print_warning(print_obj): - ''' - WARNING Log-level - ''' - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - -def set_exit_handler(func): - signal.signal(signal.SIGTERM, func) - -def time_elapsed(): - elapsed_minutes = round((time.time() - time_start)/60, +1) - return elapsed_minutes - -def version(): - print("Version: " +VERSION) - logging_est() - set_exit_handler(on_exit) - sys.exit(1) - -def main(argv): - try: - logging_est() - global time_start - time_start = time.time() - engine_ip = "" - engine_pass = "" - dg = "" - dns_servers = "" - try: - opts,args = getopt.getopt(argv,"e:n:g:d:p:hv") - except getopt.GetoptError: - help() - for opt, arg in opts: - if opt == '-h': - help() - elif opt == '-e': - engine_ip = arg - elif opt == '-p': - engine_pass = arg - elif opt == '-n': - new_engine_cidr = arg - elif opt == '-g': - dg = arg - elif opt == '-d': - dns_servers = arg - elif opt == '-v': - version() - - if (engine_ip == "" or engine_pass == "" or new_engine_cidr == "" or \ - dg == "" or dns_servers == "") : - help() - - sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) - - #Configure Static IP - primary_interface = 
network.interface.get_all(sys_server)[0].reference - print_debug("Primary interface identified as " + primary_interface) - ni_obj = NetworkInterface() - if_obj = InterfaceAddress() - if_obj.address = new_engine_cidr - if_obj.address_type = "STATIC" - #if_obj.addressType = "DHCP" - ni_obj.addresses = [if_obj] - #print_debug(str(ni_obj)) - try: - print_debug("Changing the IP address. This operation can take up to 60 seconds to complete") - network.interface.update(sys_server, primary_interface, ni_obj) - except socket_error as e: - if e.errno == errno.ETIMEDOUT: - print_debug("IP address changed") - else: - raise e - #if we made it this far, we need to operate on the new IP. - engine_ip = new_engine_cidr.split('/')[0] - print_debug("ENGINE IP: " + engine_ip) - #Now re-establish the server session - sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) - - #configure DNS - print_debug("Setting DNS") - dns_obj = DNSConfig() - dns_obj.servers = dns_servers.split(",") - dns_obj.domain = [] - service.dns.set(sys_server, dns_obj) - - #configue default gateway - print_debug("Setting default gateway") - de_routes = network.route.get_all(sys_server) - print_debug("Current routes: ") - print_debug(str(de_routes)) - default_gateway = NetworkRoute() - default_gateway.destination = "default" - default_gateway.out_interface = primary_interface - #Check to see if a DG already exists. If so, delete it. - for de_route in de_routes: - if de_route.destination == 'default': - print_debug("Found an existing DG. Deleting it") - default_gateway.gateway = dg - network.route.delete(sys_server, default_gateway) - default_gateway.gateway = dg - print_debug("Adding new route") - network.route.add(sys_server, default_gateway) - de_routes = network.route.get_all(sys_server) - print_debug("New routes: ") - print_debug(str(de_routes)) - - - except SystemExit as e: - sys.exit(e) - except HttpError as e: - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) - sys.exit(2) - except JobError as e: - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + \ - " minutes to get this far.") - sys.exit(2) - except KeyboardInterrupt: - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + \ - " minutes to get this far.") - sys.exit(2) - except socket_error as e: - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - if e.errno == errno.ETIMEDOUT: - print_debug("Connection timed out trying to connect to " \ - + engine_ip) - else: - print_error(e.message) - sys.exit(2) - except: - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + \ - " minutes to get this far.") - sys.exit(2) - -if __name__ == "__main__": - main(sys.argv[1:]) diff --git a/engine_setup.py b/engine_setup.py deleted file mode 100755 index 718d1f5..0000000 --- a/engine_setup.py +++ /dev/null @@ -1,203 +0,0 @@ -#!/usr/bin/env python -''' -Adam Bowen - Jan 2016 -This script configures the sysadmin user and configures domain0 -Will come back and properly throw this with logging, etc -''' -VERSION="v.2.3.005" -CONTENTDIR="/u02/app/content" - -import getopt -import logging -from os.path import basename -import signal -import sys 
-import time -import traceback -import untangle - -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError,JobError -from delphixpy.v1_6_0.web import domain, storage, user -from delphixpy.v1_6_0.web.vo import CredentialUpdateParameters, PasswordCredential, DomainCreateParameters, User -from lib.GetSession import GetSession - -def system_serversess(f_engine_address, f_engine_username, f_engine_password): - ''' - Function to grab the server session - ''' - server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "SYSTEM") - return server_session - -def help(): - print("\n" + basename(__file__)+ " [-e ] [-o - Engine must be up, unconfigured, and console screen must be green") - print("-o - will use this password to initially access the system") - print("-p - will set the sysadmin user to this password") - print("-v - Print version information and exit") - sys.exit(2) - -def logging_est(): - ''' - Establish Logging - ''' - global debug - logging.basicConfig(filename='landshark_setup.log',format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - print_info("Welcome to " + basename(__file__) + ", version " + VERSION) - global logger - debug = True - logger = logging.getLogger() - logger.setLevel(10) - print_info("Debug Logging is enabled.") - -def on_exit(sig, func=None): - print_info("Shutdown Command Received") - print_info("Shutting down prime_setup.py") - sys.exit(0) - -def print_debug(print_obj): - ''' - DEBUG Log-level - ''' - if debug == True: - print "DEBUG: " + str(print_obj) - logging.debug(str(print_obj)) - -def print_error(print_obj): - ''' - ERROR Log-level - ''' - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - -def print_info(print_obj): - ''' - INFO Log-level - ''' - print "INFO: " + str(print_obj) - logging.info(str(print_obj)) - -def print_warning(print_obj): - ''' - WARNING Log-level - ''' - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - -def set_exit_handler(func): - signal.signal(signal.SIGTERM, func) - -def time_elapsed(): - elapsed_minutes = round((time.time() - time_start)/60, +1) - return elapsed_minutes - -def version(): - print("Version: " +VERSION) - logging_est() - set_exit_handler(on_exit) - sys.exit(1) - -def main(argv): - try: - logging_est() - global time_start - time_start = time.time() - dx_session_obj = GetSession() - engine_ip = "" - engine_pass = "" - old_engine_pass = "" - try: - opts,args = getopt.getopt(argv,"e:o:p:hv") - except getopt.GetoptError: - help() - for opt, arg in opts: - if opt == '-h': - help() - elif opt == '-e': - engine_ip = arg - elif opt == '-o': - old_engine_pass = arg - elif opt == '-p': - engine_pass = arg - elif opt == '-v': - version() - - if (engine_ip == "" or engine_pass == "" or old_engine_pass == "") : - help() - - dx_session_obj.serversess(engine_ip, 'sysadmin', - old_engine_pass, 'SYSTEM') - - dx_session_obj.server_wait() - - sys_server = system_serversess(engine_ip, "sysadmin", old_engine_pass) - - if user.get(sys_server, "USER-1").email_address == None: - print_info("Setting sysadmin's email address") - sysadmin_user = User() - sysadmin_user.email_address = "spam@delphix.com" - user.update(sys_server, 'USER-1', sysadmin_user) - print_info("Setting sysadmin's password") - sysadmin_credupdate = CredentialUpdateParameters() - sysadmin_credupdate.new_credential = PasswordCredential() - sysadmin_credupdate.new_credential.password = engine_pass - 
user.update_credential(sys_server, 'USER-1', sysadmin_credupdate) - else: - print_info("sysadmin user has already been configured") - - try: - sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) - domain.get(sys_server) - print_info("domain0 already exists. Skipping domain0 creation.") - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(7) - except HttpError as e: - device_list = storage.device.get_all(sys_server) - system_init_params = DomainCreateParameters() - system_init_params.devices = [ device.reference for device in device_list if not device.configured ] - print_info("Creating storage domain") - domain.set(sys_server, system_init_params) - while True: - try: - sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) - domain.get(sys_server) - except: - break - print_info("Waiting for Delphix Engine to go down") - time.sleep(3) - - dx_session_obj.serversess(engine_ip, 'sysadmin', - engine_pass, 'SYSTEM') - - dx_session_obj.server_wait() - - except SystemExit as e: - sys.exit(e) - except HttpError as e: - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) - sys.exit(2) - except JobError as e: - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(2) - except KeyboardInterrupt: - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(2) - except: - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(2) - -if __name__ == "__main__": - main(sys.argv[1:]) \ No newline at end of file diff --git a/find_missing_archivelogs.py b/find_missing_archivelogs.py deleted file mode 100755 index cab392f..0000000 --- a/find_missing_archivelogs.py +++ /dev/null @@ -1,339 +0,0 @@ -#!/usr/bin/env python -# Corey Brune - March 2017 -#Description: -# Adapted from Tad Martin's bash script -# -#Requirements -#pip install docopt delphixpy - -#The below doc follows the POSIX compliant standards and allows us to use -#this doc to also define our arguments for the script. -"""Description -Usage: - find_missing_archivelogs.py --outdir - [--engine | --all] - [--debug] [--parallel ] [--poll ] - [--config ] [--logdir ] - find_missing_archivelogs.py -h | --help | -v | --version -Description - Find missing archive logs for each engine - -Examples: - find_missing_archivelogs.py --outdir /var/tmp - - -Options: - --outdir Directory for the output files - --engine Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./dx_operations_vdb.log] - -h --help Show this screen. - -v --version Show version. 
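The Usage and Options text above is not just documentation: each of these scripts feeds its module docstring straight to docopt to build its argument dictionary. A minimal, self-contained sketch of that pattern (the file name and options here are illustrative, not taken from this script):

    """Usage:
        example.py --outdir=<path> [--poll=<n>]
        example.py -h | --help
    """
    from docopt import docopt

    if __name__ == "__main__":
        # docopt parses the docstring above and returns a dict keyed by option name.
        arguments = docopt(__doc__)
        print(arguments["--outdir"], arguments["--poll"])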
-""" - -VERSION = 'v.0.0.005' - -import sys -from os.path import basename -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import source -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web.timeflow import oracle - - -from lib.DlpxException import DlpxException -from lib.DxLogging import logging_est -from lib.DxLogging import print_debug -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.GetReferences import find_all_objects -from lib.GetReferences import find_obj_by_name -from lib.GetSession import GetSession - - -def find_missing_archivelogs(hostname): - """ - Function to find missing archive log files for Oracle dSources. - """ - print 'Now working on engine {}.'.format(hostname) - - log_file = open('{}/{}.csv'.format(arguments['--outdir'], hostname), 'a+') - - log_file.write('InstanceNumber,Sequence,StartSCN,EndSCN\n') - src_objs = find_all_objects(dx_session_obj.server_session, source) - - for src_obj in src_objs: - if src_obj.virtual is False and src_obj.type == 'OracleLinkedSource': - ora_logs = oracle.log.get_all(dx_session_obj.server_session, - database=find_obj_by_name( - dx_session_obj.server_session, - database, src_obj.name).reference, - missing=True, page_size=1000) - - if ora_logs: - for log_data in ora_logs: - log_file.write('{}, {}, {}, {}, {}, {}\n'.format( - src_obj.name, log_data.instance_num, - log_data.instance_num, log_data.sequence, - log_data.start_scn, log_data.end_scn)) - elif not ora_logs: - log_file.write('{} has no missing files.\n'.format( - src_obj.name)) - log_file.close() - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - E.g.: - @run_async - def task1(): - do_something - @run_async - def task2(): - do_something_too - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -@run_async -def main_workflow(engine): - """ - This function actually runs the jobs. - Use the @run_async decorator to run this function asynchronously. 
- This allows us to run against multiple Delphix Engine simultaneously - - engine: Dictionary of engines - """ - jobs = {} - - try: - #Setup the connection to the Delphix Engine - dx_session_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while' - '{}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - #reset the running job count before we begin - i = 0 - with dx_session_obj.job_mode(single_thread): - while (len(jobs) > 0 or len(thingstodo)> 0): - if len(thingstodo)> 0: - - #if OPERATION: - find_missing_archivelogs(engine['hostname']) - - thingstodo.pop() - - #get all the jobs, then inspect them - i = 0 - for j in jobs.keys(): - job_obj = job.get(dx_session_obj.server_session, jobs[j]) - print_debug(job_obj) - print_info('{}: VDB Operations:{}\n'.format(engine['hostname'], - job_obj.job_state)) - - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - #If the job is in a non-running state, remove it from the - # running jobs list. - del jobs[j] - else: - #If the job is in a running state, increment the running - # job count. - i += 1 - - print_info(engine["hostname"] + ": " + str(i) + " jobs running. ") - #If we have running jobs, pause before repeating the checks. - if len(jobs) > 0: - sleep(float(arguments['--poll'])) - - -def run_job(): - """ - This function runs the main_workflow aynchronously against all the servers - specified - """ - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info('Executing against all Delphix Engines in the dxtools.conf') - - try: - #For each server in the dxtools.conf... - for delphix_engine in dx_session_obj.dlpx_engines: - engine = dx_session_obj[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine)) - - except DlpxException as e: - print_exception('Error encountered in run_job():\n{}'.format(e)) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dx_session_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - (arguments['--engine']))) - - except (DlpxException, RequestError, KeyError) as e: - print_exception('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. Exiting.\n{}'.format( - arguments['--engine'], config_file_path, e)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dx_session_obj.dlpx_engines: - if dx_session_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - engine = dx_session_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dx_session_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine is None: - print_exception('\nERROR: No default engine found. Exiting\n') - - #run the job against the engine - threads.append(main_workflow(engine)) - - #For each thread in the list... 
- for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - elapsed_minutes = round((time() - time_start)/60, +1) - return elapsed_minutes - - -def main(arguments): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global usebackup - global time_start - global config_file_path - global dx_session_obj - global debug - - if arguments['--debug']: - debug = True - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - engine = None - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. - run_job() - - elapsed_minutes = time_elapsed() - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - """ - This is what we use to handle our sys.exit(#) - """ - sys.exit(e) - - except HttpError as e: - """ - We use this exception handler when our connection to Delphix fails - """ - print_exception('Connection failed to the Delphix Engine' - 'Please check the ERROR message below') - sys.exit(1) - - except JobError as e: - """ - We use this exception handler when a job fails in Delphix so that - we have actionable data - """ - elapsed_minutes = time_elapsed() - print_exception('A job failed in the Delphix Engine') - print_info('{} took {:.2f} minutes to get this far:\n{}\n'.format( - basename(__file__), elapsed_minutes, e)) - sys.exit(3) - - except KeyboardInterrupt: - """ - We use this exception handler to gracefully handle ctrl+c exits - """ - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - - except: - """ - Everything else gets caught here - """ - print_exception(sys.exc_info()[0]) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far\n'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - #Feed our arguments to the main function, and off we go! 
- main(arguments) diff --git a/get_engine_pub_key.py b/get_engine_pub_key.py deleted file mode 100755 index bda79bb..0000000 --- a/get_engine_pub_key.py +++ /dev/null @@ -1,169 +0,0 @@ -#!/usr/bin/env python -''' -Adam Bowen - May 2017 -This script grabs -''' -VERSION="v.2.3.003" - -import getopt -import logging -from os.path import basename -import signal -import sys -import time -import traceback -import untangle - -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.exceptions import HttpError,JobError -from delphixpy.v1_6_0.web import system -from lib.GetSession import GetSession - - -def system_serversess(f_engine_address, f_engine_username, f_engine_password): - ''' - Function to grab the server session - ''' - server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "SYSTEM") - return server_session - -def help(): - print("\n" + basename(__file__)+ " [-e ] [-p - Engine must be up and console screen must be green") - print("-p - sysadmin password") - print("-d - directory where key will be saved") - print("-v - Print version information and exit") - sys.exit(2) - -def logging_est(): - ''' - Establish Logging - ''' - global debug - logging.basicConfig(filename='landshark_setup.log',format='%(levelname)s:%(asctime)s:%(message)s', level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - print_info("Welcome to " + basename(__file__) + ", version " + VERSION) - global logger - debug = True - logger = logging.getLogger() - logger.setLevel(10) - print_info("Debug Logging is enabled.") - -def on_exit(sig, func=None): - print_info("Shutdown Command Received") - print_info("Shutting down prime_setup.py") - sys.exit(0) - -def print_debug(print_obj): - ''' - DEBUG Log-level - ''' - if debug == True: - print "DEBUG: " + str(print_obj) - logging.debug(str(print_obj)) - -def print_error(print_obj): - ''' - ERROR Log-level - ''' - print "ERROR: " + str(print_obj) - logging.error(str(print_obj)) - -def print_info(print_obj): - ''' - INFO Log-level - ''' - print "INFO: " + str(print_obj) - logging.info(str(print_obj)) - -def print_warning(print_obj): - ''' - WARNING Log-level - ''' - print "WARNING: " + str(print_obj) - logging.warning(str(print_obj)) - -def set_exit_handler(func): - signal.signal(signal.SIGTERM, func) - -def time_elapsed(): - elapsed_minutes = round((time.time() - time_start)/60, +1) - return elapsed_minutes - -def version(): - print("Version: " +VERSION) - logging_est() - set_exit_handler(on_exit) - sys.exit(1) - -def main(argv): - try: - logging_est() - global time_start - time_start = time.time() - dx_session_obj = GetSession() - engine_ip = "" - engine_pass = "" - old_engine_pass = "" - try: - opts,args = getopt.getopt(argv,"e:d:p:hv") - except getopt.GetoptError: - help() - for opt, arg in opts: - if opt == '-h': - help() - elif opt == '-e': - engine_ip = arg - elif opt == '-p': - engine_pass = arg - elif opt == '-d': - key_path = arg + '/engine_key.pub' - elif opt == '-v': - version() - - if (engine_ip == "" or engine_pass == "") : - help() - - dx_session_obj.serversess(engine_ip, 'sysadmin', - engine_pass, 'SYSTEM') - dx_session_obj.server_wait() - - sys_server = system_serversess(engine_ip, "sysadmin", engine_pass) - system_info = system.get(sys_server) - print_info(system_info.ssh_public_key) - print_info("Writing to " + key_path) - target = open(key_path, 'w') - target.write(system_info.ssh_public_key) - target.close - print_info("File saved") - elapsed_minutes = time_elapsed() - print_info("Script took " + 
str(elapsed_minutes) + " minutes to get this far.") - - except SystemExit as e: - sys.exit(e) - except HttpError as e: - print_error("Connection failed to the Delphix Engine") - print_error( "Please check the ERROR message below") - print_error(e.message) - sys.exit(2) - except JobError as e: - print_error("A job failed in the Delphix Engine") - print_error(e.job) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(2) - except KeyboardInterrupt: - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(2) - except: - print_error(sys.exc_info()[0]) - print_error(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info("Prime took " + str(elapsed_minutes) + " minutes to get this far.") - sys.exit(2) - -if __name__ == "__main__": - main(sys.argv[1:]) \ No newline at end of file diff --git a/js_bookmark.py b/js_bookmark.py deleted file mode 100755 index 93714fd..0000000 --- a/js_bookmark.py +++ /dev/null @@ -1,552 +0,0 @@ -#!/usr/bin/env python -# Program Name : js_bookmark.py -# Description : Delphix implementation script -# Author : Corey Brune -# Created: March 4 2016 -# -# Copyright (c) 2016 by Delphix. -# All rights reserved. -# See http://docs.delphix.com/display/PS/Copyright+Statement for details -# -# Delphix Support statement available at -# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details -# -# Warranty details provided in external file -# for customers who have purchased support. -# -"""Creates, lists, removes a Jet Stream Bookmark -Usage: - js_bookmark.py (--create_bookmark --data_layout [--tags --description --branch_name ]| --list_bookmarks [--tags ] | --delete_bookmark | --activate_bookmark | --update_bookmark | --share_bookmark | --unshare_bookmark ) - [--engine | --all] [--parallel ] - [--poll ] [--debug] - [--config ] [--logdir ] - js_bookmark.py -h | --help | -v | --version - -Creates, Lists, Removes a Jet Stream Bookmark - -Examples: - js_bookmark.py --list_bookmarks - js_bookmark.py --list_bookmarks --tags "Jun 17, 25pct" - js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 - js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 --tags "1.86.2,bobby" --description "Before commit" - js_bookmark.py --create_bookmark jsbookmark1 --data_layout jstemplate1 --branch_name jsbranch1 - js_bookmark.py --activate_bookmark jsbookmark1 - js_bookmark.py --update_bookmark jsbookmark1 - js_bookmark.py --delete_bookmark jsbookmark1 - js_bookmark.py --share_bookmark jsbookmark1 - js_bookmark.py --unshare_bookmark jsbookmark1 - -Options: - --create_bookmark Name of the new JS Bookmark - --container_name Name of the container to use - --tags Tags to use for this bookmark (comma-delimited) - --description Description of this bookmark - --update_bookmark Name of the bookmark to update - --share_bookmark Name of the bookmark to share - --unshare_bookmark Name of the bookmark to unshare - --branch_name Optional: Name of the branch to use - --data_layout Name of the data layout (container or template) to use - --activate_bookmark Name of the bookmark to activate - --delete_bookmark Delete the JS Bookmark - --list_bookmarks List the bookmarks on a given engine - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. 
- --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./js_bookmark.log] - -h --help Show this screen. - -v --version Show version. -""" - -VERSION="v.0.0.019" - -from docopt import docopt -from os.path import basename -import sys -from time import sleep, time -import traceback - -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web.jetstream import bookmark -from delphixpy.v1_8_0.web.jetstream import branch -from delphixpy.v1_8_0.web.jetstream import template -from delphixpy.v1_8_0.web.jetstream import container -from delphixpy.v1_8_0.web.vo import JSBookmarkCreateParameters -from delphixpy.v1_8_0.web.vo import JSBookmark -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import HttpError - -from lib.DlpxException import DlpxException -from lib.GetSession import GetSession -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_obj_name -from lib.GetReferences import get_obj_reference -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug -from lib.DxLogging import print_exception - - -def create_bookmark(dlpx_obj, bookmark_name, source_layout, branch_name=None, - tags=None, description=None): - """ - Create the JS Bookmark - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param bookmark_name: Name of the bookmark to create - :type bookmark_name: basestring - :param source_layout: Name of the source (template or container) to use - :type source_layout: basestring - :param branch_name: Name of the branch to use - :type branch_name: basestring - :param tag_name: Tag to use for the bookmark - :type tag: basestring - :param description: Description of the bookmark - :type description: basestring - """ - - branch_ref = None - source_layout_ref = None - engine_name = dlpx_obj.dlpx_engines.keys()[0] - js_bookmark_params = JSBookmarkCreateParameters() - if branch_name: - try: - source_layout_ref = find_obj_by_name(dlpx_obj.server_session, - template, - source_layout).reference - except DlpxException: - source_layout_ref = find_obj_by_name( - dlpx_obj.server_session, container, - source_layout).reference - #import pdb;pdb.set_trace() - for branch_obj in branch.get_all(dlpx_obj.server_session): - if branch_name == branch_obj.name and \ - source_layout_ref == branch_obj.data_layout: - branch_ref = branch_obj.reference - break - if branch_ref is None: - raise DlpxException('Set the --data_layout parameter equal to ' - 'the data layout of the bookmark.\n') - elif branch_name is None: - try: - (source_layout_ref, branch_ref) = find_obj_by_name( - dlpx_obj.server_session, template, source_layout, True) - except DlpxException: - (source_layout_ref, branch_ref) = find_obj_by_name( - dlpx_obj.server_session, container, source_layout, True) - if branch_ref is None: - raise DlpxException('Could not find {} in engine {}'.format( - branch_name, engine_name)) - js_bookmark_params.bookmark = JSBookmark() - js_bookmark_params.bookmark.name = bookmark_name - js_bookmark_params.bookmark.branch = branch_ref - if tags: - js_bookmark_params.bookmark.tags = tags.split(',') - if description: - js_bookmark_params.bookmark.description = description - 
js_bookmark_params.timeline_point_parameters = { - 'sourceDataLayout': source_layout_ref, 'type': - 'JSTimelinePointLatestTimeInput'} - try: - bookmark.create(dlpx_obj.server_session, js_bookmark_params) - dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('JS Bookmark {} was created successfully.'.format( - bookmark_name)) - - except (DlpxException, RequestError, HttpError) as e: - print_exception('\nThe bookmark {} was not created. The error ' - 'was:\n\n{}'.format(bookmark_name, e)) - - -def list_bookmarks(dlpx_obj, tags=None): - """ - List all bookmarks on a given engine - - :param dlpx_obj: Virtualization Engine session object - :param tag_filter: Only list bookmarks with given tag - - """ - - header = '\nName, Reference, Branch Name, Template Name, Tags' - try: - js_bookmarks = bookmark.get_all(dlpx_obj.server_session) - print header - for js_bookmark in js_bookmarks: - branch_name = find_obj_name(dlpx_obj.server_session, branch, - js_bookmark.branch) - tag_filter = [x.strip() for x in tags.decode('utf-8','ignore').split(',')] - if all(tag in js_bookmark.tags for tag in tag_filter): - print '{}, {}, {}, {}, {}'.format(js_bookmark.name, - js_bookmark.reference, - branch_name, - js_bookmark.template_name, - ", ".join(tag for tag in - js_bookmark.tags)) - elif tag_filter is None: - tag = js_bookmark.tags if js_bookmark.tags else None - if tag: - tag = ", ".join(tag for tag in js_bookmark.tags) - print '{}, {}, {}, {}, {}'.format(js_bookmark.name, - js_bookmark.reference, - branch_name, - js_bookmark.template_name, - tag) - print '\n' - - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: The bookmarks on could not be listed. The ' - 'error was:\n\n{}'.format(e)) - - -def unshare_bookmark(dlpx_obj, bookmark_name): - """ - Unshare a bookmark - - :param dlpx_obj: Virtualization Engine session object - :param bookmark_name: Name of the bookmark to share - """ - - try: - bookmark.unshare(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - bookmark, bookmark_name).pop()) - print_info('JS Bookmark {} was unshared successfully.'.format( - bookmark_name)) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: The bookmark {} could not be unshared. ' - 'The error was:\n\n{}'.format(bookmark_name, e)) - - -def share_bookmark(dlpx_obj, bookmark_name): - """ - Share a bookmark - - :param dlpx_obj: Virtualization Engine session object - :param bookmark_name: Name of the bookmark to share - """ - - try: - bookmark.share(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - bookmark, bookmark_name).pop()) - print_info('JS Bookmark {} was shared successfully.'.format( - bookmark_name)) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: The bookmark {} could not be shared. The ' - 'error was:\n\n{}'.format(bookmark_name, e)) - - -def update_bookmark(dlpx_obj, bookmark_name): - """ - Updates a bookmark - - :param dlpx_obj: Virtualization Engine session object - :param bookmark_name: Name of the bookmark to update - """ - - js_bookmark_obj = JSBookmark() - - try: - bookmark.update(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - bookmark, bookmark_name).pop(), - js_bookmark_obj) - - except (DlpxException, HttpError, RequestError) as e: - print_exception('ERROR: The bookmark {} could not be updated. 
The ' - 'error was:\n{}'.format(bookmark_name, e)) - - -def delete_bookmark(dlpx_obj, bookmark_name): - """ - Deletes a bookmark - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param bookmark_name: Bookmark to delete - :type bookmark_name: str - """ - - try: - bookmark.delete(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - bookmark, bookmark_name).pop()) - print_info('The bookmark {} was deleted successfully.'.format( - bookmark_name)) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: The bookmark {} was not deleted. The ' - 'error was:\n\n{}'.format(bookmark_name, e.message)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target=func, args=args, kwargs=kwargs) - func_hl.start() - return func_hl - - return async_func - - -def time_elapsed(time_start): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - - :param time_start: start time of the script. - :type time_start: float - """ - return round((time() - time_start)/60, +1) - - -@run_async -def main_workflow(engine, dlpx_obj): - """ - This function is where we create our main workflow. - Use the @run_async decorator to run this function asynchronously. 
- The @run_async decorator allows us to run against multiple Delphix Engine - simultaneously - - :param engine: Dictionary of engines - :type engine: dictionary - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - """ - - try: - # Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - except DlpxException as e: - print_exception('ERROR: js_bookmark encountered an error authenticating' - ' to {} {}:\n{}\n'.format(engine['hostname'], - arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dlpx_obj.job_mode(single_thread): - while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: - if len(thingstodo) > 0: - if arguments['--create_bookmark']: - create_bookmark(dlpx_obj, - arguments['--create_bookmark'], - arguments['--data_layout'], - arguments['--branch_name'] - if arguments['--branch_name'] - else None, - arguments['--tags'] - if arguments['--tags'] else None, - arguments['--description'] - if arguments['--description'] else None) - elif arguments['--delete_bookmark']: - delete_bookmark(dlpx_obj, - arguments['--delete_bookmark']) - elif arguments['--update_bookmark']: - update_bookmark(dlpx_obj, - arguments['--update_bookmark']) - elif arguments['--share_bookmark']: - share_bookmark(dlpx_obj, - arguments['--share_bookmark']) - elif arguments['--unshare_bookmark']: - unshare_bookmark(dlpx_obj, - arguments['--unshare_bookmark']) - elif arguments['--list_bookmarks']: - list_bookmarks(dlpx_obj, - arguments['--tags'] if arguments['--tags'] else None) - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - job_obj = job.get(dlpx_obj.server_session, - dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Running JS Bookmark: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - except (DlpxException, RequestError, JobError, HttpError) as e: - print_exception('Error in js_bookmark: {}\n{}'.format( - engine['hostname'], e)) - sys.exit(1) - - -def run_job(dlpx_obj, config_file_path): - """ - This function runs the main_workflow aynchronously against all the - servers specified - - :param dlpx_obj: Virtualization Engine session object - :type dlpx_obj: lib.GetSession.GetSession - :param config_file_path: string containing path to configuration file. - :type config_file_path: str - """ - - # Create an empty list to store threads we create. - threads = [] - engine = None - - # If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info('Executing against all Delphix Engines in the dxtools.conf') - try: - # For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - # Create a new thread and add it to the list. 
- threads.append(main_workflow(engine, dlpx_obj)) - except DlpxException as e: - print_exception('Error encountered in run_job():\n{}'.format(e)) - sys.exit(1) - - elif arguments['--all'] is False: - # Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dlpx_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - except (DlpxException, RequestError, KeyError): - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value and' - ' try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - else: - # Else search for a default engine in the dxtools.conf - for delphix_engine in dlpx_obj.dlpx_engines: - if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': - engine = dlpx_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine is None: - raise DlpxException('\nERROR: No default engine found. Exiting') - - # run the job against the engine - threads.append(main_workflow(engine, dlpx_obj)) - - # For each thread in the list... - for each in threads: - # join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def main(): - # We want to be able to call on these variables anywhere in the script. - global single_thread - global debug - - time_start = time() - single_thread = False - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - config_file_path = arguments['--config'] - # Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - # This is the function that will handle processing main_workflow for - # all the servers. - run_job(dx_session_obj, config_file_path) - - elapsed_minutes = time_elapsed(time_start) - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - # Here we handle what we do when the unexpected happens - except SystemExit as e: - # This is what we use to handle our sys.exit(#) - sys.exit(e) - - except DlpxException as e: - # We use this exception handler when an error occurs in a function call. - print_exception('ERROR: Please check the ERROR message below:\n' - '{}'.format(e.message)) - sys.exit(2) - - except HttpError as e: - # We use this exception handler when our connection to Delphix fails - print_exception('ERROR: Connection failed to the Delphix Engine. 
Please' - 'check the ERROR message below:\n{}'.format(e.message)) - sys.exit(2) - - except JobError as e: - # We use this exception handler when a job fails in Delphix so that we - # have actionable data - print_exception('A job failed in the Delphix Engine:\n{}'.format(e.job)) - elapsed_minutes = time_elapsed(time_start) - print_exception('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(3) - - except KeyboardInterrupt: - # We use this exception handler to gracefully handle ctrl+c exits - print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed(time_start) - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - except: - # Everything else gets caught here - print_exception('{}\n{}'.format(sys.exc_info()[0], - traceback.format_exc())) - elapsed_minutes = time_elapsed(time_start) - print_info("{} took {:.2f} minutes to get this far".format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - -if __name__ == "__main__": - # Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - # Feed our arguments to the main function, and off we go! - main() diff --git a/js_branch.py b/js_branch.py deleted file mode 100755 index 725c1d5..0000000 --- a/js_branch.py +++ /dev/null @@ -1,478 +0,0 @@ -#!/usr/bin/env python -# Program Name : js_branch.py -# Description : Delphix implementation script -# Author : Corey Brune -# Created: March 4 2016 -# -# Copyright (c) 2016 by Delphix. -# All rights reserved. -# See http://docs.delphix.com/display/PS/Copyright+Statement for details -# -# Delphix Support statement available at -# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details -# -# Warranty details provided in external file -# for customers who have purchased support. -# -"""Creates, updates, deletes, activates and lists branches -Usage: - js_branch.py (--create_branch --container_name [--template_name | --bookmark_name ]| --list_branches | --delete_branch | --activate_branch | --update_branch ) - [--engine | --all] [--parallel ] - [--poll ] [--debug] - [--config ] [--logdir ] - js_branch.py -h | --help | -v | --version - -Creates, Lists, Removes a Jet Stream Branch - -Examples: - js_branch.py --list_branches - js_branch.py --create_branch jsbranch1 --container_name jscontainer --template_name jstemplate1 - js_branch.py --activate_branch jsbranch1 - js_branch.py --delete_branch jsbranch1 - js_branch.py --update_branch jsbranch1 - -Options: - --create_branch Name of the new JS Branch - --bookmark_name Name of the container to use - --update_branch Name of the branch to update - --template_name Name of the template to use - --activate_branch Name of the branch to activate - --delete_branch Delete the JS Branch - --list_branches List the branchs on a given engine - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./js_branch.log] - -h --help Show this screen. - -v --version Show version. 
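Like the other scripts removed in this diff, js_branch.py resolves its target engine inside run_job(): an explicit --engine value selects that entry from dxtools.conf, otherwise the entry flagged default=true is used. A condensed sketch of that selection logic (pick_engine and engines are illustrative names; engines stands for the parsed dxtools.conf dictionary):

    def pick_engine(engines, engine_name=None):
        # An explicit --engine value wins; otherwise fall back to the default entry.
        if engine_name:
            return engines[engine_name]
        for name, cfg in engines.items():
            if cfg.get("default") == "true":
                return cfg
        raise KeyError("No default engine found in dxtools.conf")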
-""" - -VERSION="v.0.0.015" - -from docopt import docopt -from os.path import basename -import sys -import traceback -import re -from time import time, sleep - -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web.jetstream import branch -from delphixpy.v1_8_0.web.jetstream import container -from delphixpy.v1_8_0.web.jetstream import template -from delphixpy.v1_8_0.web.jetstream import operation -from delphixpy.v1_8_0.web.jetstream import bookmark -from delphixpy.v1_8_0.web.vo import JSBranchCreateParameters -from delphixpy.v1_8_0.web.vo import JSTimelinePointBookmarkInput -from delphixpy.v1_8_0.web.vo import JSTimelinePointLatestTimeInput -from delphixpy.v1_8_0.web.vo import JSBranch -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import HttpError - -from lib.DlpxException import DlpxException -from lib.GetSession import GetSession -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import find_obj_name -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug -from lib.DxLogging import print_exception - - -def create_branch(dlpx_obj, branch_name, container_name, template_name=None, - bookmark_name=None): - """ - Create the JS Branch - - :param dlpx_obj: Virtualization Engine session object - :param branch_name: Name of the branch to create - :param container_name: Name of the container to use - :param template_name: Name of the template to use - :param bookmark_name: Name of the bookmark to use - """ - - js_branch = JSBranchCreateParameters() - js_branch.name = branch_name - engine_name = dlpx_obj.dlpx_engines.keys()[0] - data_container_obj = find_obj_by_name(dlpx_obj.server_session, - container, container_name) - js_branch.data_container = data_container_obj.reference - - if bookmark_name: - js_branch.timeline_point_parameters = JSTimelinePointBookmarkInput() - js_branch.timeline_point_parameters.bookmark = find_obj_by_name( - dlpx_obj.server_session, bookmark, bookmark_name).reference - elif template_name: - source_layout_ref = find_obj_by_name(dlpx_obj.server_session, - template, template_name).reference - js_branch.timeline_point_parameters = JSTimelinePointLatestTimeInput() - js_branch.timeline_point_parameters.source_data_layout = \ - source_layout_ref - - try: - branch.create(dlpx_obj.server_session, js_branch) - dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - except (DlpxException, RequestError, HttpError) as e: - print_exception('\nThe branch was not created. 
The error was:' - '\n{}'.format(e)) - print_info('JS Branch {} was created successfully.'.format( - branch_name)) - - -def list_branches(dlpx_obj): - """ - List all branches on a given engine - - :param dlpx_obj: Virtualization Engine session object - """ - - try: - header = '\nBranch Name, Data Layout, Reference, End Time' - js_data_layout = '' - js_branches = branch.get_all(dlpx_obj.server_session) - - print header - for js_branch in js_branches: - js_end_time = operation.get(dlpx_obj.server_session, - js_branch.first_operation).end_time - if re.search('TEMPLATE', js_branch.data_layout): - js_data_layout = find_obj_name(dlpx_obj.server_session, - template, js_branch.data_layout) - elif re.search('CONTAINER', js_branch.data_layout): - js_data_layout = find_obj_name(dlpx_obj.server_session, - container, js_branch.data_layout) - print_info('{} {}, {}, {}'.format(js_branch._name[0], - js_data_layout, - js_branch.reference, - js_end_time)) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: JS Branches could not be listed. The ' - 'error was:\n\n{}'.format(e)) - - -def update_branch(dlpx_obj, branch_name): - """ - Updates a branch - - :param dlpx_obj: Virtualization Engine session object - :param branch_name: Name of the branch to update - """ - - js_branch_obj = JSBranch() - try: - branch_obj = find_obj_by_name(dlpx_obj.server_session, - branch, branch_name) - branch.update(dlpx_obj.server_session, branch_obj.reference, - js_branch_obj) - print_info('The branch {} was updated successfully.'.format( - branch_name)) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: The branch could not be updated. The ' - 'error was:\n\n{}'.format(e)) - - -def activate_branch(dlpx_obj, branch_name): - """ - Activates a branch - - :param dlpx_obj: Virtualization Engine session object - :param branch_name: Name of the branch to activate - """ - - engine_name = dlpx_obj.dlpx_engines.keys()[0] - try: - branch_obj = find_obj_by_name(dlpx_obj.server_session, - branch, branch_name) - branch.activate(dlpx_obj.server_session, branch_obj.reference) - dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('The branch {} was activated successfully.'.format( - branch_name)) - except RequestError as e: - print_exception('\nAn error occurred activating the ' - 'branch:\n{}'.format(e)) - - -def delete_branch(dlpx_obj, branch_name): - """ - Deletes a branch - :param dlpx_obj: Virtualization Engine session object - :param branch_name: Branch to delete - """ - - try: - branch_obj = find_obj_by_name(dlpx_obj.server_session, - branch, branch_name) - branch.delete(dlpx_obj.server_session, branch_obj.reference) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: The branch was not deleted. The ' - 'error was:\n\n{}'.format(e.message)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... 
- t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - return round((time() - time_start)/60, +1) - - -@run_async -def main_workflow(engine, dlpx_obj): - """ - This function is where we create our main workflow. - Use the @run_async decorator to run this function asynchronously. - The @run_async decorator allows us to run against multiple Delphix Engine - simultaneously - - :param engine: Dictionary of engines - :param dlpx_obj: Virtualization Engine session object - """ - - #Establish these variables as empty for use later - environment_obj = None - source_objs = None - - try: - #Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while ' - 'provisioning {}:\n{}\n'.format(engine['hostname'], - arguments['--target'], - e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dlpx_obj.job_mode(single_thread): - while (len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0): - if len(thingstodo) > 0: - if arguments['--create_branch']: - create_branch(dlpx_obj, arguments['--create_branch'], - arguments['--container_name'], - arguments['--template_name'] - if arguments['--template_name'] else None, - arguments['--bookmark_name'] - if arguments['--bookmark_name'] else None) - elif arguments['--delete_branch']: - delete_branch(dlpx_obj, arguments['--delete_branch']) - elif arguments['--update_branch']: - update_branch(dlpx_obj, arguments['--update_branch']) - elif arguments['--activate_branch']: - activate_branch(dlpx_obj, - arguments['--activate_branch']) - elif arguments['--list_branches']: - list_branches(dlpx_obj) - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - job_obj = job.get(dlpx_obj.server_session, - dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Provisioning JS Branch: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. - if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - except (DlpxException, RequestError, JobError, HttpError) as e: - print_exception('\nError in js_branch: {}\n{}'.format( - engine['hostname'], e)) - - -def run_job(dlpx_obj, config_file_path): - """ - This function runs the main_workflow aynchronously against all the - servers specified - - dlpx_obj: Virtualization Engine session object - config_file_path: path containing the dxtools.conf file. - """ - #Create an empty list to store threads we create. 
- threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - try: - #For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine, dlpx_obj)) - - except DlpxException as e: - print_exception('Error encountered in run_job():\n{}'.format(e)) - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dlpx_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - - except (DlpxException, RequestError, KeyError) as e: - print_exception('\nERROR: Delphix Engine {} cannot be found' - ' in {}. Please check your value and try' - ' again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dlpx_obj.dlpx_engines: - if dlpx_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - - engine = dlpx_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) - break - - if engine is None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine, dlpx_obj)) - - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def main(): - #We want to be able to call on these variables anywhere in the script. - global single_thread - global time_start - global debug - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. - run_job(dx_session_obj, config_file_path) - elapsed_minutes = time_elapsed() - print_info('Script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - #This is what we use to handle our sys.exit(#) - sys.exit(e) - - except DlpxException as e: - #We use this exception handler when an error occurs in a function call. - - print('\nERROR: Please check the ERROR message below:\n{}'.format( - e.message)) - sys.exit(2) - - except HttpError as e: - #We use this exception handler when our connection to Delphix fails - - print('\nERROR: Connection failed to the Delphix Engine. 
Please ' - 'check the ERROR message below:\n{}'.format(e.message)) - sys.exit(2) - - except JobError as e: - #We use this exception handler when a job fails in Delphix so that we - #have actionable data - - print('A job failed in the Delphix Engine:\n{}'.format(e.job)) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(3) - - except KeyboardInterrupt: - #We use this exception handler to gracefully handle ctrl+c exits - - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - - except: - #Everything else gets caught here - - print(sys.exc_info()[0]) - print(traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - #Feed our arguments to the main function, and off we go! - main() diff --git a/js_container.py b/js_container.py deleted file mode 100755 index 7d10484..0000000 --- a/js_container.py +++ /dev/null @@ -1,628 +0,0 @@ -#!/usr/bin/env python -# Program Name : js_container.py -# Description : Delphix implementation script -# Author : Corey Brune -# Created: March 4 2016 -# -# Copyright (c) 2016 by Delphix. -# All rights reserved. -# See http://docs.delphix.com/display/PS/Copyright+Statement for details -# -# Delphix Support statement available at -# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details -# -# Warranty details provided in external file -# for customers who have purchased support. -# -"""Create, delete, refresh and list JS containers. 
-Usage: - js_container.py (--create_container --template_name --database | --reset | --list_hierarchy | --list | --delete_container [--keep_vdbs]| --refresh_container | --add_owner --container_name | --remove_owner --container_name | --restore_container --bookmark_name ) - [--engine | --all] [--parallel ] - [--poll ] [--debug] - [--config ] [--logdir ] - js_container.py -h | --help | -v | --version - -Creates, Lists, Removes a Jet Stream Template - -Examples: - js_container.py --list - js_container.py --list_hierarchy jscontainer1 - js_container.py --add_owner jsuser - js_container.py --create_container jscontainer1 --database --template_name jstemplate1 - js_container.py --delete_container jscontainer1 - js_container.py --refresh_container jscontainer1 - js_container.py --add_owner jsuser --container_name jscontainer1 - js_container.py --remove_owner jsuser --container_name jscontainer1 - js_container.py --refresh_container jscontainer1 - js_container.py --restore_container jscontainer1 --bookmark_name jsbookmark1 - js_conatiner.py --reset jscontainer1 - -Options: - --create_container Name of the new JS Container - --container_name Name of the JS Container - --refresh_container Name of the new JS Container - --restore_container Name of the JS Container to restore - --reset Reset last data operation - --template_name Name of the JS Template to use for the container - --add_owner Name of the JS Owner for the container - --remove_owner Name of the JS Owner to remove - --bookmark_name Name of the JS Bookmark to restore the container - --keep_vdbs If set, deleting the container will not remove - the underlying VDB(s) - --list_hierarchy Lists hierarchy of a given container name - --delete_container Delete the JS Container - --database Name of the child database(s) to use for the - JS Container - --list_containers List the containers on a given engine - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./js_container.log] - -h --help Show this screen. - -v --version Show version. 
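# --- Illustrative sketch, not part of the original js_container.py ----------
# The --database option accepts one or more child database names joined with
# ':'; create_container() below splits that string and builds one data-source
# parameter set per name via build_ds_params(). This helper only mirrors the
# splitting step so it runs without delphixpy or a live engine; the database
# names used here are made-up examples.
def split_database_arg(database_arg):
    """Return the individual database names from a ':'-delimited string."""
    return [name for name in database_arg.split(':') if name]

assert split_database_arg('vdb_dev1:vdb_dev2') == ['vdb_dev1', 'vdb_dev2']
# ----------------------------------------------------------------------------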
-""" - -VERSION = "v.0.0.020" - -from os.path import basename -import sys -import traceback -from time import sleep, time -from docopt import docopt - -from delphixpy.v1_8_0.web.jetstream import container -from delphixpy.v1_8_0.web.jetstream import bookmark -from delphixpy.v1_8_0.web.jetstream import template -from delphixpy.v1_8_0.web.jetstream import datasource -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import user -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web.vo import JSDataContainerCreateParameters -from delphixpy.v1_8_0.web.vo import JSDataSourceCreateParameters -from delphixpy.v1_8_0.web.vo import JSTimelinePointBookmarkInput -from delphixpy.v1_8_0.web.vo import JSDataContainerModifyOwnerParameters -from delphixpy.v1_8_0.web.vo import JSDataContainerDeleteParameters -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import HttpError - -from lib.DlpxException import DlpxException -from lib.GetSession import GetSession -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import get_obj_reference -from lib.GetReferences import find_obj_name -from lib.GetReferences import convert_timestamp -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_exception -from lib.DxLogging import print_debug - - -def create_container(dlpx_obj, template_name, container_name, database_name): - """ - Create the JS container - - dlpx_obj: Virtualization Engine session object - container_name: Name of the container to create - database_name: Name of the database(s) to use in the container - """ - - js_container_params = JSDataContainerCreateParameters() - container_ds_lst = [] - engine_name = dlpx_obj.dlpx_engines.keys()[0] - for db in database_name.split(':'): - container_ds_lst.append(build_ds_params(dlpx_obj, database, db)) - - try: - js_template_obj = find_obj_by_name(dlpx_obj.server_session, - template, template_name) - js_container_params.template = js_template_obj.reference - js_container_params.timeline_point_parameters = { - 'sourceDataLayout': js_template_obj.reference, - 'type': 'JSTimelinePointLatestTimeInput'} - js_container_params.data_sources = container_ds_lst - js_container_params.name = container_name - container.create(dlpx_obj.server_session, js_container_params) - dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('JS Container {} was created successfully.'.format( - container_name)) - except (DlpxException, RequestError, HttpError) as e: - print_exception('Container {} was not created. The error ' - 'was:\n{}\n'.format(container_name, e)) - - -def remove_owner(dlpx_obj, owner_name, container_name): - """ - Removes an owner from a container - - dlpx_obj: Virtualization Engine session object - owner_name: Name of the owner to remove - container_name: Name of the container - """ - - owner_params = JSDataContainerModifyOwnerParameters() - try: - user_ref = find_obj_by_name(dlpx_obj.server_session, - user, owner_name).reference - owner_params.owner = user_ref - container_obj = find_obj_by_name(dlpx_obj.server_session, - container, container_name) - container.remove_owner(dlpx_obj.server_session, - container_obj.reference, owner_params) - print_info('User {} was granted access to {}'.format(owner_name, - container_name)) - except (DlpxException, RequestError, HttpError) as e: - print_exception('The user was not added to container {}. 
The ' - 'error was:\n{}\n'.format(container_name, e)) - - -def restore_container(dlpx_obj, container_name, bookmark_name): - """ - Restores a container to a given JS bookmark - - dlpx_obj: Virtualization Engine session object - container_name: Name of the container - bookmark_name: Name of the bookmark to restore - """ - bookmark_params = JSTimelinePointBookmarkInput() - bookmark_params.bookmark = get_obj_reference(dlpx_obj.server_session, - bookmark, bookmark_name).pop() - engine_name = dlpx_obj.dlpx_engines.keys()[0] - try: - container.restore(dlpx_obj.server_session, get_obj_reference( - dlpx_obj.server_session, container, container_name).pop(), - bookmark_params) - dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('Container {} was restored successfully with ' - 'bookmark {}'.format(container_name, bookmark_name)) - except (DlpxException, RequestError, HttpError) as e: - print_exception('The user was not added to container {}. The ' - 'error was:\n{}\n'.format(container_name, e)) - - -def add_owner(dlpx_obj, owner_name, container_name): - """ - Adds an owner to a container - - dlpx_obj: Virtualization Engine session object - owner_name: Grant authorizations for the given user on this container and - parent template - container_name: Name of the container - """ - - owner_params = JSDataContainerModifyOwnerParameters() - try: - owner_params.owner = get_obj_reference(dlpx_obj.server_session, - user, owner_name).pop() - container.add_owner(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - container, container_name).pop(), - owner_params) - print_info('User {} was granted access to {}'.format(owner_name, - container_name)) - except (DlpxException, RequestError, HttpError) as e: - print_exception('The user was not added to container {}. The error' - ' was:\n{}\n'.format(container_name, e)) - - -def refresh_container(dlpx_obj, container_name): - """ - Refreshes a container - - dlpx_obj: Virtualization Engine session object - container_name: Name of the container to refresh - """ - - engine_name = dlpx_obj.dlpx_engines.keys()[0] - try: - container.refresh(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - container, container_name).pop()) - dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('The container {} was refreshed.'.format(container_name)) - except (DlpxException, RequestError, HttpError) as e: - print_exception('\nContainer {} was not refreshed. The error ' - 'was:\n{}\n'.format(container_name, e)) - - -def delete_container(dlpx_obj, container_name, keep_vdbs=False): - """ - Deletes a container - - dlpx_obj: Virtualization Engine session object - container_name: Container to delete - """ - - try: - if keep_vdbs: - js_container_params = JSDataContainerDeleteParameters() - js_container_params.delete_data_sources = False - container.delete(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - container, container_name).pop(), - js_container_params) - elif keep_vdbs is False: - container.delete(dlpx_obj.server_session, - get_obj_reference(dlpx_obj.server_session, - container, container_name).pop()) - except (DlpxException, RequestError, HttpError) as e: - print_exception('\nContainer {} was not deleted. 
The error ' - 'was:\n{}\n'.format(container_name, e)) - - -def list_containers(dlpx_obj): - """ - List all containers on a given engine - - dlpx_obj: Virtualization Engine session object - """ - - header = 'Name, Active Branch, Owner, Reference, Template, Last Updated' - js_containers = container.get_all(dlpx_obj.server_session) - try: - print header - for js_container in js_containers: - last_updated = convert_timestamp(dlpx_obj.server_session, - js_container.last_updated[:-5]) - print_info('{}, {}, {}, {}, {}, {}'.format(js_container.name, - js_container.active_branch, str(js_container.owner), - str(js_container.reference), str(js_container.template), - last_updated)) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: JS Containers could not be listed. The ' - 'error was:\n\n{}'.format(e)) - - -def reset_container(dlpx_obj, container_name): - """ - Undo the last refresh or restore operation - :param dlpx_obj: Virtualization Engine session object - :param container_name: Name of the container to reset - """ - try: - container.reset(dlpx_obj.server_session, find_obj_by_name( - dlpx_obj.server_session, container, container_name).reference) - except RequestError as e: - print_exception('\nERROR: JS Container was not reset. The ' - 'error was:\n\n{}'.format(e)) - print 'Container {} was reset.\n'.format(container_name) - - - -def list_hierarchy(dlpx_obj, container_name): - """ - Filter container listing. - - dlpx_obj: Virtualization Engine session object - container_name: Name of the container to list child VDBs. - """ - - database_dct = {} - layout_ref = find_obj_by_name(dlpx_obj.server_session, container, - container_name).reference - for ds in datasource.get_all(dlpx_obj.server_session, - data_layout=layout_ref): - db_name = (find_obj_name(dlpx_obj.server_session, - database, ds.container)) - if hasattr(ds.runtime, 'jdbc_strings'): - database_dct[db_name] = ds.runtime.jdbc_strings - else: - database_dct[db_name] = 'None' - try: - print_info('Container: {}\nRelated VDBs: {}\n'.format( - container_name, convert_dct_str(database_dct))) - except (AttributeError, DlpxException) as e: - print_exception(e) - - -def convert_dct_str(obj_dct): - """ - Convert dictionary into a string for printing - - obj_dct: Dictionary to convert into a string - :return: string object - """ - js_str = '' - - if isinstance(obj_dct, dict): - for js_db, js_jdbc in obj_dct.iteritems(): - if isinstance(js_jdbc, list): - js_str += '{}: {}\n'.format(js_db, ', '.join(js_jdbc)) - elif isinstance(js_jdbc, str): - js_str += '{}: {}\n'.format(js_db, js_jdbc) - else: - raise DlpxException('Passed a non-dictionary object to ' - 'convert_dct_str(): {}'.format(type(obj_dct))) - return js_str - - -def build_ds_params(dlpx_obj, obj, db): - """ - Builds the datasource parameters - - dlpx_obj: Virtualization Engine session object - obj: object type to use when finding db - db: Name of the database to use when building the parameters - """ - ds_params = JSDataSourceCreateParameters() - ds_params.source = {'type': 'JSDataSource', 'name': db} - try: - db_obj = find_obj_by_name(dlpx_obj.server_session, obj, db) - ds_params.container = db_obj.reference - return ds_params - except RequestError as e: - print_exception('\nCould not find {}\n{}'.format(db, e.message)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). 
- Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target=func, args=args, kwargs=kwargs) - func_hl.start() - return func_hl - - return async_func - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - return round((time() - time_start)/60, +1) - - -@run_async -def main_workflow(engine, dlpx_obj): - """ - This function is where we create our main workflow. - Use the @run_async decorator to run this function asynchronously. - The @run_async decorator allows us to run against multiple Delphix Engine - simultaneously - - engine: Dictionary of engines - dlpx_obj: Virtualization Engine session object - """ - - try: - #Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while ' - 'creating the session:\n{}\n'.format( - dlpx_obj.dlpx_engines['hostname'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dlpx_obj.job_mode(single_thread): - while len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0: - if len(thingstodo) > 0: - if arguments['--create_container']: - create_container(dlpx_obj, - arguments['--template_name'], - arguments['--create_container'], - arguments['--database']) - elif arguments['--delete_container']: - delete_container(dlpx_obj, - arguments['--delete_container'], - arguments['--keep_vdbs']) - elif arguments['--list']: - list_containers(dlpx_obj) - elif arguments['--remove_owner']: - remove_owner(dlpx_obj, arguments['--remove_owner'], - arguments['--container_name']) - elif arguments['--restore_container']: - restore_container(dlpx_obj, - arguments['--restore_container'], - arguments['--bookmark_name']) - elif arguments['--add_owner']: - add_owner(dlpx_obj, arguments['--add_owner'], - arguments['--container_name']) - elif arguments['--refresh_container']: - refresh_container(dlpx_obj, - arguments['--refresh_container']) - elif arguments['--list_hierarchy']: - list_hierarchy(dlpx_obj, arguments['--list_hierarchy']) - elif arguments['--reset']: - reset_container(dlpx_obj, arguments['--reset']) - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - job_obj = job.get(dlpx_obj.server_session, - dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: JS Container operations: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the - # running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. 
- if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - - except (DlpxException, RequestError, JobError, HttpError) as e: - print '\nError in js_container: {}:\n{}'.format(engine['hostname'], e) - sys.exit(1) - - -def run_job(dlpx_obj, config_file_path): - """ - This function runs the main_workflow aynchronously against all the - servers specified - - dlpx_obj: Virtualization Engine session object - config_file_path: filename of the configuration file for virtualization - engines - """ - - #Create an empty list to store threads we create. - threads = [] - engine = None - - #If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info("Executing against all Delphix Engines in the dxtools.conf") - - try: - #For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - #Create a new thread and add it to the list. - threads.append(main_workflow(engine, dlpx_obj)) - - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - - elif arguments['--all'] is False: - #Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dlpx_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - except (DlpxException, RequestError, KeyError) as e: - print_exception('\nERROR: Delphix Engine {} cannot be ' - 'found in {}. Please check your value ' - 'and try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - else: - #Else search for a default engine in the dxtools.conf - for delphix_engine in dlpx_obj.dlpx_engines: - if dlpx_obj.dlpx_engines[delphix_engine]['default'] == \ - 'true': - engine = dlpx_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) - break - if engine is None: - raise DlpxException("\nERROR: No default engine found. Exiting") - - #run the job against the engine - threads.append(main_workflow(engine, dlpx_obj)) - #For each thread in the list... - for each in threads: - #join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def main(): - """ - Main function - setup global variables and timer - """ - #We want to be able to call on these variables anywhere in the script. - global single_thread - global time_start - global debug - - if arguments['--debug']: - debug = True - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - single_thread = False - config_file_path = arguments['--config'] - #Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - #This is the function that will handle processing main_workflow for - # all the servers. - run_job(dx_session_obj, config_file_path) - - elapsed_minutes = time_elapsed() - print_info('script took {:.2f} minutes to get this far.'.format( - elapsed_minutes)) - - #Here we handle what we do when the unexpected happens - except SystemExit as e: - #This is what we use to handle our sys.exit(#) - sys.exit(e) - except DlpxException as e: - #We use this exception handler when an error occurs in a function call. 
- print_exception('\nERROR: Please check the ERROR message ' - 'below:\n{}'.format(e.message)) - sys.exit(2) - except HttpError as e: - #We use this exception handler when our connection to Delphix fails - print '\nERROR: Connection failed to the Delphix Engine. Please ' \ - 'check the ERROR message below:\n{}'.format(e.message) - sys.exit(2) - except JobError as e: - #We use this exception handler when a job fails in Delphix so that we - #have actionable data - print 'A job failed in the Delphix Engine:\n{}'.format(e.job) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(3) - except KeyboardInterrupt: - #We use this exception handler to gracefully handle ctrl+c exits - print_debug("You sent a CTRL+C to interrupt the process") - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - except: - #Everything else gets caught here - print sys.exc_info()[0] - print traceback.format_exc() - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - - -if __name__ == "__main__": - #Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - #Feed our arguments to the main function, and off we go! - main() diff --git a/js_template.py b/js_template.py deleted file mode 100755 index a95ef97..0000000 --- a/js_template.py +++ /dev/null @@ -1,413 +0,0 @@ -#!/usr/bin/env python -# Program Name : js_template.py -# Description : Delphix implementation script -# Author : Corey Brune -# Created: March 4 2016 -# -# Copyright (c) 2016 by Delphix. -# All rights reserved. -# See http://docs.delphix.com/display/PS/Copyright+Statement for details -# -# Delphix Support statement available at -# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details -# -# Warranty details provided in external file -# for customers who have purchased support. -# -"""Creates, deletes and lists JS templates. -Usage: - js_template.py (--create_template --database | --list_templates | --delete_template ) - [--engine | --all] [--parallel ] - [--poll ] [--debug] - [--config ] [--logdir ] - js_template.py -h | --help | -v | --version - -Creates, Lists, Removes a Jet Stream Template - -Examples: - js_template.py --list_templates - js_template.py --create_template jstemplate1 --database - js_template.py --create_template jstemplate2 --database - js_template.py --delete_template jstemplate1 - -Options: - --create_template Name of the new JS Template - --delete_template Delete the JS Template - --database Name of the database(s) to use for the JS Template - Note: If adding multiple template DBs, use a - comma (:) to delineate between the DB names. - --list_templates List the templates on a given engine - --engine Alt Identifier of Delphix engine in dxtools.conf. - --all Run against all engines. - --debug Enable debug logging - --parallel Limit number of jobs to maxjob - --poll The number of seconds to wait between job polls - [default: 10] - --config The path to the dxtools.conf file - [default: ./dxtools.conf] - --logdir The path to the logfile you want to use. - [default: ./js_template.log] - -h --help Show this screen. - -v --version Show version. 
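# --- Illustrative sketch, not part of the original js_template.py -----------
# main_workflow() further down polls Delphix job state in a loop: jobs that
# reach CANCELED/COMPLETED/FAILED are dropped, and the loop sleeps --poll
# seconds between passes. poll_jobs() shows that control flow in isolation;
# get_state is a stand-in for job.get(server_session, ref).job_state, and
# 'JOB-123' is a made-up reference.
from time import sleep

def poll_jobs(jobs, get_state, poll_seconds=10):
    """Poll job references until none of them is still running."""
    while jobs:
        for ref in list(jobs):  # iterate over a copy; we mutate `jobs` below
            if get_state(ref) in ('CANCELED', 'COMPLETED', 'FAILED'):
                jobs.remove(ref)
        if jobs:
            sleep(poll_seconds)

# e.g. poll_jobs(['JOB-123'], get_state=lambda ref: 'COMPLETED', poll_seconds=0)
# ----------------------------------------------------------------------------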
-""" - -VERSION="v.0.0.015" - -from docopt import docopt -from os.path import basename -import sys -import traceback -from time import time, sleep - -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web.jetstream import template -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web.vo import JSDataTemplateCreateParameters -from delphixpy.v1_8_0.web.vo import JSDataSourceCreateParameters -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import HttpError - -from lib.DlpxException import DlpxException -from lib.GetSession import GetSession -from lib.GetReferences import find_obj_by_name -from lib.GetReferences import convert_timestamp -from lib.DxLogging import logging_est -from lib.DxLogging import print_info -from lib.DxLogging import print_debug -from lib.DxLogging import print_exception - - -def create_template(dlpx_obj, template_name, database_name): - """ - Create the JS Template - - dlpx_obj: Virtualization Engine session object - template_name: Name of the template to create - database_name: Name of the database(s) to use in the template - """ - - js_template_params = JSDataTemplateCreateParameters() - js_template_params.name = template_name - template_ds_lst = [] - engine_name = dlpx_obj.dlpx_engines.keys()[0] - - for db in database_name.split(':'): - template_ds_lst.append(build_ds_params(dlpx_obj, database, db)) - try: - js_template_params.data_sources = template_ds_lst - js_template_params.type = 'JSDataTemplateCreateParameters' - template.create(dlpx_obj.server_session, js_template_params) - dlpx_obj.jobs[engine_name] = dlpx_obj.server_session.last_job - print_info('Template {} was created successfully.\n'.format( - template_name)) - except (DlpxException, RequestError, HttpError) as e: - print_exception('\nThe template {} was not created. The error ' - 'was:\n\n{}'.format(template_name, e)) - - -def list_templates(dlpx_obj): - """ - List all templates on a given engine - - dlpx_obj: Virtualization Engine session object - """ - - header = 'Name, Reference, Active Branch, Last Updated' - - try: - print header - js_templates = template.get_all(dlpx_obj.server_session) - for js_template in js_templates: - last_updated = convert_timestamp(dlpx_obj.server_session, - js_template.last_updated[:-5]) - print_info('{}, {}, {}, {}'.format(js_template.name, - js_template.reference, - js_template.active_branch, - last_updated)) - except (DlpxException, HttpError, RequestError) as e: - raise DlpxException('\nERROR: The templates could not be listed. ' - 'The error was:\n\n{}'.format(e.message)) - - -def delete_template(dlpx_obj, template_name): - """ - Deletes a template - - dlpx_obj: Virtualization Engine session object - template_name: Template to delete - """ - - try: - template_obj = find_obj_by_name(dlpx_obj.server_session, - template, template_name) - template.delete(dlpx_obj.server_session, - template_obj.reference) - print 'Template {} is deleted.'.format(template_name) - except (DlpxException, HttpError, RequestError) as e: - print_exception('\nERROR: The template {} was not deleted. 
The' - ' error was:\n\n{}'.format(template_name, e.message)) - - -def build_ds_params(dlpx_obj, obj, db): - """ - Builds the datasource parameters - - dlpx_obj: Virtualization Engine session object - obj: object type to use when finding db - db: Name of the database to use when building the parameters - """ - - try: - db_obj = find_obj_by_name(dlpx_obj.server_session, - obj, db) - ds_params = JSDataSourceCreateParameters() - ds_params.source = {'type':'JSDataSource', 'name': db} - ds_params.container = db_obj.reference - return ds_params - except RequestError as e: - print_exception('\nCould not find {}\n{}'.format(db, e.message)) - - -def run_async(func): - """ - http://code.activestate.com/recipes/576684-simple-threading-decorator/ - run_async(func) - function decorator, intended to make "func" run in a separate - thread (asynchronously). - Returns the created Thread object - - E.g.: - @run_async - def task1(): - do_something - - @run_async - def task2(): - do_something_too - - t1 = task1() - t2 = task2() - ... - t1.join() - t2.join() - """ - from threading import Thread - from functools import wraps - - @wraps(func) - def async_func(*args, **kwargs): - func_hl = Thread(target = func, args = args, kwargs = kwargs) - func_hl.start() - return func_hl - - return async_func - - -def time_elapsed(): - """ - This function calculates the time elapsed since the beginning of the script. - Call this anywhere you want to note the progress in terms of time - """ - return round((time() - time_start)/60, +1) - - -@run_async -def main_workflow(engine, dlpx_obj): - """ - This function is where we create our main workflow. - Use the @run_async decorator to run this function asynchronously. - The @run_async decorator allows us to run against multiple Delphix Engine - simultaneously - - engine: Dictionary of engines - dlpx_obj: Virtualization Engine session object - """ - - try: - #Setup the connection to the Delphix Engine - dlpx_obj.serversess(engine['ip_address'], engine['username'], - engine['password']) - except DlpxException as e: - print_exception('\nERROR: Engine {} encountered an error while ' - 'provisioning {}:\n{}\n'.format( - dlpx_obj.engine['hostname'], arguments['--target'], e)) - sys.exit(1) - - thingstodo = ["thingtodo"] - try: - with dlpx_obj.job_mode(single_thread): - while (len(dlpx_obj.jobs) > 0 or len(thingstodo) > 0): - if len(thingstodo) > 0: - if arguments['--create_template']: - create_template(dlpx_obj, - arguments['--create_template'], - arguments['--database']) - elif arguments['--delete_template']: - delete_template(dlpx_obj, - arguments['--delete_template']) - elif arguments['--list_templates']: - list_templates(dlpx_obj) - thingstodo.pop() - # get all the jobs, then inspect them - i = 0 - for j in dlpx_obj.jobs.keys(): - job_obj = job.get(dlpx_obj.server_session, - dlpx_obj.jobs[j]) - print_debug(job_obj) - print_info('{}: Provisioning JS Template: {}'.format( - engine['hostname'], job_obj.job_state)) - if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: - # If the job is in a non-running state, remove it - # from the running jobs list. - del dlpx_obj.jobs[j] - elif job_obj.job_state in 'RUNNING': - # If the job is in a running state, increment the - # running job count. - i += 1 - print_info('{}: {:d} jobs running.'.format( - engine['hostname'], i)) - # If we have running jobs, pause before repeating the - # checks. 
- if len(dlpx_obj.jobs) > 0: - sleep(float(arguments['--poll'])) - except (DlpxException, RequestError, JobError, HttpError) as e: - print_exception('\nError in js_template: {}:\n{}'.format( - engine['hostname'], e)) - sys.exit(1) - - -def run_job(dlpx_obj, config_file_path): - """ - This function runs the main_workflow aynchronously against all the - servers specified - - dlpx_obj: Virtualization Engine session object - config_file_path: path containing the dxtools.conf file. - """ - # Create an empty list to store threads we create. - threads = [] - engine = None - - # If the --all argument was given, run against every engine in dxtools.conf - if arguments['--all']: - print_info('Executing against all Delphix Engines in the dxtools.conf') - - try: - # For each server in the dxtools.conf... - for delphix_engine in dlpx_obj.dlpx_engines: - engine = dlpx_obj.dlpx_engines[delphix_engine] - # Create a new thread and add it to the list. - threads.append(main_workflow(engine, dlpx_obj)) - except DlpxException as e: - print 'Error encountered in run_job():\n{}'.format(e) - sys.exit(1) - elif arguments['--all'] is False: - # Else if the --engine argument was given, test to see if the engine - # exists in dxtools.conf - if arguments['--engine']: - try: - engine = dlpx_obj.dlpx_engines[arguments['--engine']] - print_info('Executing against Delphix Engine: {}\n'.format( - arguments['--engine'])) - - except (DlpxException, RequestError, KeyError): - raise DlpxException('\nERROR: Delphix Engine {} cannot be ' 'found in %s. Please check your value ' - 'and try again. Exiting.\n'.format( - arguments['--engine'], config_file_path)) - else: - # Else search for a default engine in the dxtools.conf - for delphix_engine in dlpx_obj.dlpx_engines: - if dlpx_obj.dlpx_engines[delphix_engine]['default'] == 'true': - engine = dlpx_obj.dlpx_engines[delphix_engine] - print_info('Executing against the default Delphix Engine ' - 'in the dxtools.conf: {}'.format( - dlpx_obj.dlpx_engines[delphix_engine]['hostname'])) - break - if engine is None: - raise DlpxException('\nERROR: No default engine found. Exiting') - # run the job against the engine - threads.append(main_workflow(engine, dlpx_obj)) - - # For each thread in the list... - for each in threads: - # join them back together so that we wait for all threads to complete - # before moving on - each.join() - - -def main(): - # We want to be able to call on these variables anywhere in the script. - global single_thread - global time_start - global debug - - try: - dx_session_obj = GetSession() - logging_est(arguments['--logdir']) - print_debug(arguments) - time_start = time() - config_file_path = arguments['--config'] - - - logging_est(arguments['--logdir']) - print_debug(arguments) - single_thread = False - # Parse the dxtools.conf and put it into a dictionary - dx_session_obj.get_config(config_file_path) - - # This is the function that will handle processing main_workflow for - # all the servers. - run_job(dx_session_obj, config_file_path) - - elapsed_minutes = time_elapsed() - print_info('script took {:.2f} to get this far.'.format( - elapsed_minutes)) - - # Here we handle what we do when the unexpected happens - except SystemExit as e: - # This is what we use to handle our sys.exit(#) - sys.exit(e) - - except DlpxException as e: - # We use this exception handler when an error occurs in a function call. 
- print_info('\nERROR: Please check the ERROR message below:\n{}'.format( - e.message)) - sys.exit(2) - - except HttpError as e: - # We use this exception handler when our connection to Delphix fails - print_info('\nERROR: Connection failed to the Delphix Engine. Please ' - 'check the ERROR message below:\n{}'.format(e.message)) - sys.exit(2) - - except JobError as e: - # We use this exception handler when a job fails in Delphix so that we - # have actionable data - print('A job failed in the Delphix Engine:\n{}'.format(e.job)) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(3) - - except KeyboardInterrupt: - # We use this exception handler to gracefully handle ctrl+c exits - print_debug('You sent a CTRL+C to interrupt the process') - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - except: - # Everything else gets caught here - print '{}\n{}'.format(sys.exc_info()[0], traceback.format_exc()) - elapsed_minutes = time_elapsed() - print_info('{} took {:.2f} minutes to get this far'.format( - basename(__file__), elapsed_minutes)) - sys.exit(1) - - -if __name__ == "__main__": - # Grab our arguments from the doc at the top of the script - arguments = docopt(__doc__, version=basename(__file__) + " " + VERSION) - - # Feed our arguments to the main function, and off we go! - main() diff --git a/lib/DlpxException.py b/lib/DlpxException.py deleted file mode 100644 index 874e732..0000000 --- a/lib/DlpxException.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -Custom exception class for delphixpy scripts -""" - -from DxLogging import print_exception - -class DlpxException(Exception): - """ - Delphix Exception class. Exit signals are handled by calling method. - """ - - - def __init__(self, message): -# print_exception(message) - Exception.__init__(self, message) diff --git a/lib/DxLogging.py b/lib/DxLogging.py deleted file mode 100644 index b2ef466..0000000 --- a/lib/DxLogging.py +++ /dev/null @@ -1,63 +0,0 @@ -""" -Package DxLogging -""" - -import logging - -VERSION = 'v.0.1.005' - -def logging_est(logfile_path, debug=False): - """ - Establish Logging - - logfile_path: path to the logfile. Default: current directory. - debug: Set debug mode on (True) or off (False). Default: False - """ - - logging.basicConfig(filename=logfile_path, - format='%(levelname)s:%(asctime)s:%(message)s', - level=logging.INFO, datefmt='%Y-%m-%d %H:%M:%S') - - logger = logging.getLogger() - - if debug is True: - logger.setLevel(10) - print_info('Debug Logging is enabled.') - - -def print_debug(print_obj, debug=False): - """ - Call this function with a log message to prefix the message with DEBUG - - print_obj: Object to print to logfile and stdout - debug: Flag to enable debug logging. 
Default: False - :rtype: None - """ - try: - if debug is True: - print 'DEBUG: {}'.format(str(print_obj)) - logging.debug(str(print_obj)) - except: - pass - - -def print_info(print_obj): - """ - Call this function with a log message to prefix the message with INFO - """ - print 'INFO: {}'.format(str(print_obj)) - logging.info(str(print_obj)) - -def print_warning(print_obj): - """ - Call this function with a log message to prefix the message with INFO - """ - print 'WARN: %s' % (str(print_obj)) - logging.warn(str(print_obj)) - -def print_exception(print_obj): - """ - Call this function with a log message to prefix the message with EXCEPTION - """ - print str(print_obj) - logging.exception('EXCEPTION: {}'.format(str(print_obj))) diff --git a/lib/DxTimeflow.py b/lib/DxTimeflow.py deleted file mode 100644 index d69c2b0..0000000 --- a/lib/DxTimeflow.py +++ /dev/null @@ -1,313 +0,0 @@ -""" -List, create, destroy and refresh Delphix timeflows -""" -# TODO: -# implement debug flag - -import re -import sys - -from delphixpy.v1_8_0.exceptions import HttpError, JobError, RequestError -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import timeflow -from delphixpy.v1_8_0.web import snapshot -from delphixpy.v1_8_0 import job_context -from delphixpy.v1_8_0.web.timeflow import bookmark -from delphixpy.v1_8_0.web.vo import OracleRefreshParameters -from delphixpy.v1_8_0.web.vo import OracleTimeflowPoint -from delphixpy.v1_8_0.web.vo import RefreshParameters -from delphixpy.v1_8_0.web.vo import TimeflowPointLocation -from delphixpy.v1_8_0.web.vo import MSSqlTimeflowPoint -from delphixpy.v1_8_0.web.vo import TimeflowPointTimestamp -from delphixpy.v1_8_0.web.vo import TimeflowPointSemantic - -from DlpxException import DlpxException -from GetReferences import get_obj_reference -from GetReferences import convert_timestamp -from GetReferences import find_obj_by_name -from DxLogging import print_exception - -VERSION = 'v.0.2.003' - -class DxTimeflow(object): - """Shared methods for timeflows """ - - def __init__(self, engine): - self.engine = engine - - - def get_timeflow_reference(self, db_name): - """ - Return current_timeflow for the db_name - - db_name: The database name to retrieve current_timeflow - """ - - db_lst = database.get_all(self.engine) - - for db_obj in db_lst: - if db_obj.name == db_name: - return db_obj.current_timeflow - - raise DlpxException('Timeflow reference not found for {}'.format( - db_name)) - - - def list_timeflows(self): - """ - Retrieve and print all timeflows for a given engine - """ - - all_timeflows = timeflow.get_all(self.engine) - - print 'DB Name, Timeflow Name, Timestamp' - for tfbm_lst in all_timeflows: - - try: - db_name = get_obj_reference(self.engine, database, - tfbm_lst.container) - - print '{}, {}, {}\n'.format(str(db_name), - str(tfbm_lst.name), - str(tfbm_lst.parent_point.timestamp)) - - except AttributeError: - print '{}, {}\n'.format(str(tfbm_lst.name), str(db_name)) - - except TypeError as e: - raise DlpxException('Listing Timeflows encountered an error' - ':\n{}'.format((e))) - - except RequestError as e: - dlpx_err = e.message - raise DlpxException(dlpx_err.action) - - except (JobError, HttpError) as e: - raise DlpxException(e) - - - def create_bookmark(self, bookmark_name, db_name, timestamp=None, - location=None): - """ - Create a timeflow bookmark - - bookmark_name: Bookmark's name - db_name: The database name to re - timestamp: Timestamp for the bookmark. 
- Required format is (UTC/Zulu): YYYY-MM-DDTHH:MM:SS.000Z - location: Location of the bookmark - """ - - global bookmark_type - tf_ref = self.get_timeflow_reference(db_name) - - if re.search('ORAC', tf_ref, re.IGNORECASE): - bookmark_type = 'OracleTimeflowPoint' - otfp = OracleTimeflowPoint() - elif re.search('MSSql', tf_ref, re.IGNORECASE): - bookmark_type = 'MSSqlTimeflowPoint' - otfp = MSSqlTimeflowPoint() - - otfp.type = bookmark_type - otfp.timeflow = tf_ref - - if timestamp is not None: - otfp.timestamp = timestamp - else: - otfp.location = location - - tf_create_params = TimeflowBookmarkCreateParameters() - tf_create_params.name = bookmark_name - tf_create_params.timeflow_point = otfp - - try: - print 'Bookmark {} successfully created with reference {}'.format( - bookmark.bookmark.create(self.engine, tf_create_params)) - - except RequestError as e: - raise DlpxException(e.message) - - except (JobError, HttpError): - print_exception('Fatal exception caught while creating the' - 'Timeflow Bookmark:\n{}\n'.format( - sys.exc_info()[0])) - - - def get_bookmarks(self, parsable=False): - """ - Print all Timeflow Bookmarks - - parsable (optional): Flag to print output in a parsable format. - """ - - all_bookmarks = bookmark.bookmark.get_all(self.engine) - - if parsable is False: - print('\nBookmark name\tReference\tTimestamp\t' - 'Location\tTimeflow\n') - - elif parsable is True: - print 'Bookmark name,Reference,Timestamp,Location,Timeflow' - - for tfbm_lst in all_bookmarks: - try: - if tfbm_lst.timestamp is None: - converted_timestamp = None - - else: - converted_timestamp = \ - convert_timestamp(self.engine, tfbm_lst.timestamp[:-5]) - - if parsable is False: - print '{} {} {} {} {}'.format(tfbm_lst.name, - tfbm_lst.reference, str(converted_timestamp), - tfbm_lst.location, tfbm_lst.timeflow) - elif parsable is True: - print '{},{},{},{},{}'.format(tfbm_lst.name, - tfbm_lst.reference, str(converted_timestamp), - tfbm_lst.location, tfbm_lst.timeflow) - - except TypeError: - print 'No timestamp found for {}'.format(tfbm_lst.name) - - except RequestError as e: - dlpx_err = e.message - raise DlpxException(dlpx_err.action) - - - def find_snapshot(self, database_ref, timestamp, snap_name=None, - snap_time=None): - """ - Method to find a snapshot by name - - database_obj: database reference for the snapshot lookup - snap_name: name of the snapshot. Default: None - snap_time: time of the snapshot. Default: None - """ - - snapshots = snapshot.get_all(self.engine, database=database_ref) - - matches = [] - for snapshot_obj in snapshots: - if (str(snapshot_obj.name).startswith(timestamp) and - snap_name is not None): - - matches.append(snapshot_obj) - - elif (str(snapshot_obj.latest_change_point.timestamp).startswith(timestamp) - and snap_time is not None): - - matches.append(snapshot_obj) - - if len(matches) == 1: - return matches[0] - - elif len(matches) > 1: - raise DlpxException('{}: The name specified was not specific ' - 'enough. More than one match found.\n'.format( - self.engine.address)) - - elif len(matches) < 1: - raise DlpxException('{}: No matches found for the time ' - 'specified.\n'.format(self.engine.address)) - - - def set_timeflow_point(self, container_obj, timestamp_type, - timestamp='LATEST', timeflow_name=None): - """ - This method returns the reference of the timestamp specified. - container_obj: Delphix object containing the snapshot/timeflow to be - provisioned. - timestamp_type: Type of timestamp - SNAPSHOT or TIME - timestamp: Name of timestamp/snapshot. 
Default: Latest - """ - - if timestamp_type.upper() == "SNAPSHOT": - if timestamp.upper() == "LATEST": - timeflow_point_parameters = TimeflowPointSemantic() - timeflow_point_parameters.container = container_obj.reference - timeflow_point_parameters.location = "LATEST_SNAPSHOT" - - elif timestamp.startswith("@"): - snapshot_obj = self.find_snapshot(container_obj.reference, - timestamp, snap_name=True) - - if snapshot_obj: - timeflow_point_parameters=TimeflowPointLocation() - timeflow_point_parameters.timeflow = snapshot_obj.timeflow - timeflow_point_parameters.location = \ - snapshot_obj.latest_change_point.location - - else: - raise DlpxException('ERROR: Was unable to use the ' - 'specified snapshot {}' - 'for database {}'.format(timestamp, - container_obj.name)) - - elif timestamp: - snapshot_obj = self.find_snapshot(container_obj.reference, - timestamp, snap_time=True) - - if snapshot_obj: - timeflow_point_parameters=TimeflowPointTimestamp() - timeflow_point_parameters.timeflow = snapshot_obj.timeflow - timeflow_point_parameters.timestamp = \ - snapshot_obj.latest_change_point.timestamp - - elif snapshot_obj is None: - print_exception('Was unable to find a suitable time' - ' for {} for database {}'.format( - (timestamp, container_obj.name))) - - elif timestamp_type.upper() == "TIME": - if timestamp.upper() == "LATEST": - timeflow_point_parameters = TimeflowPointSemantic() - timeflow_point_parameters.location = "LATEST_POINT" - - elif timestamp: - timeflow_point_parameters = TimeflowPointTimestamp() - timeflow_point_parameters.type = 'TimeflowPointTimestamp' - timeflow_obj = find_obj_by_name(self.engine, timeflow, - timeflow_name) - - timeflow_point_parameters.timeflow = timeflow_obj.reference - timeflow_point_parameters.timestamp = timestamp - return timeflow_point_parameters - else: - raise DlpxException('{} is not a valid timestamp_type. Exiting' - '\n'.format(timestamp_type)) - - timeflow_point_parameters.container = container_obj.reference - return timeflow_point_parameters - - - def refresh_container(self, parent_bookmark_ref, db_type, child_db_ref): - """ - Refreshes a container - - parent_bookmark_ref: The parent's bookmark reference. - db_type: The database type - child_db_ref: The child database reference - """ - - if db_type == 'Oracle': - tf_params = OracleRefreshParameters() - else: - tf_params = RefreshParameters() - - tf_params.timeflow_point_parameters = {'type': 'TimeflowPointBookmark', - 'bookmark': parent_bookmark_ref} - - try: - with job_context.async(self.engine): - db_ret_val = database.refresh(self.engine, child_db_ref, - tf_params) - return db_ret_val - - except RequestError as e: - dlpx_err = e.message - raise DlpxException(dlpx_err.action) - - except (JobError, HttpError) as e: - print_exception('Exception caught during refresh:\n{}'.format( - sys.exc_info()[0])) diff --git a/lib/GetReferences.py b/lib/GetReferences.py deleted file mode 100644 index 75f711f..0000000 --- a/lib/GetReferences.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -Module that provides lookups of references and names of Delphix objects. 
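# --- Illustrative sketch, not part of the original lib/GetReferences.py -----
# find_obj_by_name() below resolves a name to a delphixpy object by calling
# <class>.get_all(engine) and returning the first object whose .name matches.
# This stand-alone helper mirrors that linear search without a delphixpy
# session; SimpleNamespace stands in for the web objects, which expose .name
# and .reference attributes, and the sample values are made up.
from types import SimpleNamespace

def find_by_name(objects, name):
    """Return the first object whose .name matches, else raise LookupError."""
    for obj in objects:
        if obj.name == name:
            return obj
    raise LookupError('{} was not found'.format(name))

_objs = [SimpleNamespace(name='vdb_dev1', reference='ORACLE_DB_CONTAINER-1')]
assert find_by_name(_objs, 'vdb_dev1').reference == 'ORACLE_DB_CONTAINER-1'
# ----------------------------------------------------------------------------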
-""" - -import re -from datetime import datetime -from dateutil import tz - -from delphixpy.v1_8_0.web.service import time -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.web import repository -from delphixpy.v1_8_0.web import database -from delphixpy.v1_8_0.web import source -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import sourceconfig - -from DlpxException import DlpxException -from DxLogging import print_debug -from DxLogging import print_exception - -VERSION = 'v.0.2.0019' - -def convert_timestamp(engine, timestamp): - """ - Convert timezone from Zulu/UTC to the Engine's timezone - engine: A Delphix engine session object. - timestamp: the timstamp in Zulu/UTC to be converted - """ - - default_tz = tz.gettz('UTC') - engine_tz = time.time.get(engine) - - try: - convert_tz = tz.gettz(engine_tz.system_time_zone) - utc = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S') - utc = utc.replace(tzinfo=default_tz) - converted_tz = utc.astimezone(convert_tz) - engine_local_tz = '{} {} {}'.format(str(converted_tz.date()), - str(converted_tz.time()), - str(converted_tz.tzname())) - - return engine_local_tz - except TypeError: - return None - - -def find_all_objects(engine, f_class): - """ - Return all objects from a given class - engine: A Delphix engine session object - f_class: The objects class. I.E. database or timeflow. - :return: List of objects - """ - - return_lst = [] - - try: - return f_class.get_all(engine) - - except (JobError, HttpError) as e: - raise DlpxException('{} Error encountered in {}: {}\n'.format( - engine.address, f_class, e)) - - -def find_obj_specs(engine, obj_lst): - """ - Function to find objects for replication - engine: Delphix Virtualization session object - obj_lst: List of names for replication - :return: List of references for the given object names - """ - rep_lst = [] - for obj in obj_lst: - rep_lst.append(find_obj_by_name(engine, database, obj).reference) - return rep_lst - - -def get_running_job(engine, target_ref): - """ - Function to find a running job from the DB target reference. - :param engine: A Virtualization engine session object - :param target_ref: Reference to the target of the running job - :return: - """ - return job.get_all(engine, target=target_ref, - job_state='RUNNING')[0].reference - - -def find_obj_list(obj_lst, obj_name): - """ - Function to find an object in a list of objects - obj_lst: List containing objects from the get_all() method - obj_name: Name of the object to match - :return: The named object. None is returned if no match is found.` - """ - for obj in obj_lst: - if obj_name == obj.name: - return obj - return None - - -def find_obj_by_name(engine, f_class, obj_name, active_branch=False): - """ - Function to find objects by name and object class, and return object's - reference as a string - engine: A Delphix engine session object - f_class: The objects class. I.E. database or timeflow. - obj_name: The name of the object - active_branch: Default = False. If true, return list containing - the object's reference and active_branch. Otherwise, return - the reference. - """ - - return_list = [] - - try: - all_objs = f_class.get_all(engine) - except AttributeError as e: - raise DlpxException('Could not find reference for object class' - '{}.\n'.format(e)) - for obj in all_objs: - if obj.name == obj_name: - - if active_branch is False: - return(obj) - - #This code is for JS objects only. 
- elif active_branch is True: - return_list.append(obj.reference) - return_list.append(obj.active_branch) - return(return_list) - - return obj - - #If the object isn't found, raise an exception. - raise DlpxException('{} was not found on engine {}.\n'.format( - obj_name, engine.address)) - -def find_source_by_dbname(engine, f_class, obj_name, active_branch=False): - """ - Function to find sources by database name and object class, and return object's - reference as a string - engine: A Delphix engine session object - f_class: The objects class. I.E. database or timeflow. - obj_name: The name of the database object in Delphix - active_branch: Default = False. If true, return list containing - the object's reference and active_branch. Otherwise, return - the reference. - """ - - return_list = [] - - try: - all_objs = f_class.get_all(engine) - except AttributeError as e: - raise DlpxException('Could not find reference for object class' - '{}.\n'.format(e)) - for obj in all_objs: - - if obj.name == obj_name: - print_debug('object: {}\n\n'.format(obj)) - print_debug(obj.name) - print_debug(obj.reference) - source_obj = source.get_all(engine,database=obj.reference) - print_debug('source: {}\n\n'.format(source_obj)) - return source_obj[0] - - #If the object isn't found, raise an exception. - raise DlpxException('{} was not found on engine {}.\n'.format( - obj_name, engine.address)) - - -def get_obj_reference(engine, obj_type, obj_name, search_str=None, - container=False): - """ - Return the reference for the provided object name - engine: A Delphix engine object. - results: List containing object name - search_str (optional): string to search within results list - container (optional): search for container instead of name - """ - - ret_lst = [] - - results = obj_type.get_all(engine) - - for result in results: - if container is False: - if result.name == obj_name: - ret_lst.append(result.reference) - - if search_str: - if re.search(search_str, result.reference, re.IGNORECASE): - ret_lst.append(True) - else: - ret_lst.append(False) - - return ret_lst - else: - if result.container == obj_name: - ret_lst.append(result.reference) - - return ret_lst - - raise DlpxException('Reference not found for {}'.format(obj_name)) - - -def find_obj_name(engine, f_class, obj_reference): - """ - Return the obj name from obj_reference - - engine: A Delphix engine object. - f_class: The objects class. I.E. database or timeflow. - obj_reference: The object reference to retrieve the name - """ - try: - obj_name = f_class.get(engine, obj_reference) - return obj_name.name - - except RequestError as e: - raise DlpxException(e) - - except (JobError, HttpError) as e: - raise DlpxException(e.message) - - -def find_dbrepo(engine, install_type, f_environment_ref, f_install_path): - """ - Function to find database repository objects by environment reference and - install path, and return the object's reference as a string - You might use this function to find Oracle and PostGreSQL database repos. - engine: Virtualization Engine Session object - install_type: Type of install - Oracle, ASE, SQL - f_environment_ref: Reference of the environment for the repository - f_install_path: Path to the installation directory. 
- return: delphixpy.web.vo.SourceRepository object - """ - - print_debug('Searching objects in the %s class for one with the ' - 'environment reference of %s and an install path of %s' % - (install_type, f_environment_ref, f_install_path)) - #import pdb;pdb.set_trace() - all_objs = repository.get_all(engine, environment=f_environment_ref) - for obj in all_objs: - if 'OracleInstall' == install_type: - if (obj.type == install_type and - obj.installation_home == f_install_path): - - print_debug('Found a match %s'.format(obj.reference)) - return obj - - elif 'MSSqlInstance' == install_type: - if (obj.type == install_type and - obj.instance_name == f_install_path): - - print_debug('Found a match {}'.format(obj.reference)) - return obj - - else: - raise DlpxException('No Repo match found for type {}.\n'.format( - install_type)) - -def find_sourceconfig(engine, sourceconfig_name, f_environment_ref): - """ - Function to find database sourceconfig objects by environment reference and - sourceconfig name (db name), and return the object's reference as a string - You might use this function to find Oracle and PostGreSQL database - sourceconfigs. - engine: Virtualization Engine Session object - sourceconfig_name: Name of source config, usually name of db - instnace (ie. orcl) - f_environment_ref: Reference of the environment for the repository - return: delphixpy.web.vo.SourceConfig object - """ - - print_debug('Searching objects in the SourceConfig class for one with the ' - 'environment reference of %s and a name of %s' % - (f_environment_ref, sourceconfig_name)) - all_objs = sourceconfig.get_all(engine, environment=f_environment_ref) - for obj in all_objs: - if obj.name == sourceconfig_name: - print_debug('Found a match %s'.format(obj.reference)) - return obj - else: - raise DlpxException('No sourceconfig match found for type {}.' - '\n'.format(sourceconfig_name)) diff --git a/lib/GetSession.py b/lib/GetSession.py deleted file mode 100644 index e75a742..0000000 --- a/lib/GetSession.py +++ /dev/null @@ -1,169 +0,0 @@ -#!/usr/bin/env python -# Corey Brune - Oct 2016 -#This class handles the config file and authentication to a VE -#requirements -#pip install docopt delphixpy - -"""This module takes the conf file for VE(s) and returns an authentication - object -""" - -import json -import ssl -from time import sleep - -from delphixpy.v1_8_0.delphix_engine import DelphixEngine -from delphixpy.v1_8_0.exceptions import RequestError -from delphixpy.v1_8_0.exceptions import JobError -from delphixpy.v1_8_0.exceptions import HttpError -from delphixpy.v1_8_0 import job_context -from delphixpy.v1_8_0.web import job -from delphixpy.v1_8_0.web import system - -from lib.DlpxException import DlpxException -from lib.DxLogging import print_debug -from lib.DxLogging import print_info - - -VERSION = 'v.0.2.09' - - -class GetSession(object): - """ - Class to get the configuration and returns an Delphix authentication - object - """ - - def __init__(self): - self.server_session = None - self.dlpx_engines = {} - self.jobs = {} - - - def __getitem__(self, key): - return self.data[key] - - - def get_config(self, config_file_path='./dxtools.conf'): - """ - This method reads in the dxtools.conf file - - config_file_path: path to the configuration file. 
- Default: ./dxtools.conf - """ - - #config_file_path = config_file_path - #config_file = None - - #First test to see that the file is there and we can open it - try: - with open(config_file_path) as config_file: - - #Now parse the file contents as json and turn them into a - #python dictionary, throw an error if it isn't proper json - config = json.loads(config_file.read()) - - except IOError: - raise DlpxException('\nERROR: Was unable to open {}. Please ' - 'check the path and permissions, and try ' - 'again.\n'.format(config_file_path)) - - except (ValueError, TypeError, AttributeError) as e: - raise DlpxException('\nERROR: Was unable to read {} as json. ' - 'Please check if the file is in a json format' - ' and try again.\n {}'.format(config_file_path, - e)) - - #Create a dictionary of engines (removing the data node from the - # dxtools.json, for easier parsing) - for each in config['data']: - self.dlpx_engines[each['hostname']] = each - - - def serversess(self, f_engine_address, f_engine_username, - f_engine_password, f_engine_namespace='DOMAIN'): - """ - Method to setup the session with the Virtualization Engine - - f_engine_address: The Virtualization Engine's address (IP/DNS Name) - f_engine_username: Username to authenticate - f_engine_password: User's password - f_engine_namespace: Namespace to use for this session. Default: DOMAIN - """ - -# if use_https: -# if hasattr(ssl, '_create_unverified_context'): -# ssl._create_default_https_context = \ -# ssl._create_unverified_context - - try: - if f_engine_password: - self.server_session = DelphixEngine(f_engine_address, - f_engine_username, - f_engine_password, - f_engine_namespace) - elif f_engine_password is None: - self.server_session = DelphixEngine(f_engine_address, - f_engine_username, - None, f_engine_namespace) - - except (HttpError, RequestError, JobError) as e: - raise DlpxException('ERROR: An error occurred while authenticating' - ' to {}:\n {}\n'.format(f_engine_address, e)) - - - def job_mode(self, single_thread=True): - """ - This method tells Delphix how to execute jobs, based on the - single_thread variable - - single_thread: Execute application synchronously (True) or - async (False) - Default: True - """ - - #Synchronously (one at a time) - if single_thread is True: - print_debug("These jobs will be executed synchronously") - return job_context.sync(self.server_session) - - #Or asynchronously - elif single_thread is False: - print_debug("These jobs will be executed asynchronously") - return job_context.async(self.server_session) - - - def job_wait(self): - """ - This job stops all work in the thread/process until jobs are completed. - - No arguments - """ - #Grab all the jos on the server (the last 25, be default) - all_jobs = job.get_all(self.server_session) - - #For each job in the list, check to see if it is running (not ended) - for jobobj in all_jobs: - if not (jobobj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): - print_debug('\nDEBUG: Waiting for %s (currently: %s) to ' - 'finish running against the container.\n' % - (jobobj.reference, jobobj.job_state)) - - #If so, wait - job_context.wait(self.server_session, jobobj.reference) - - def server_wait(self): - """ - This job just waits for the Delphix Engine to be up and for a - succesful connection. 
- - No arguments - """ - while True: - try: - system.get(self.server_session) - break - except: - pass - print_info("Waiting for Delphix Engine to be ready") - sleep(3) diff --git a/lib/__init__.py b/lib/__init__.py index 9eae820..e4fe97e 100644 --- a/lib/__init__.py +++ b/lib/__init__.py @@ -1,5 +1,5 @@ -import DlpxException -import DxLogging -import DxTimeflow -import GetReferences -import GetSession +from . import dlpx_exceptions +from . import dx_logging +from . import dx_timeflow +from . import get_references +from . import get_session diff --git a/lib/dlpx_exceptions.py b/lib/dlpx_exceptions.py new file mode 100644 index 0000000..2b1a80c --- /dev/null +++ b/lib/dlpx_exceptions.py @@ -0,0 +1,56 @@ +""" +Custom exception class for delphixpy scripts +""" + + +class DlpxException(BaseException): + """ + Delphix Exception class. Exit signals are handled by calling method. + """ + + def __init__(self, error): + super(DlpxException, self).__init__(error) + self._error = error + + @property + def error(self): + """ + Return an DlpxException object describing this error. + """ + return self.error + + +class DlpxObjectNotFound(BaseException): + """ + Delphix Exception class. Exit signals are handled by calling method. + Raised when a Delphix Object is not found + """ + + def __init__(self, message): + super(DlpxObjectNotFound, self).__init__(message) + self._message = message + + @property + def message(self): + """ + Return an ErrorResult object describing this request message. + """ + return self._message + + +class DlpxObjectExists(BaseException): + """ + Delphix Exception class. Exit signals are handled by calling method. + Raised when a Delphix Object is found + """ + + def __init__(self, message): + super(DlpxObjectExists, self).__init__(message) + self._message = message + + @property + def message(self): + """ + Return an ErrorResult object describing this request message. + """ + return self._message diff --git a/lib/dsource_link.py b/lib/dsource_link.py new file mode 100644 index 0000000..1da9dac --- /dev/null +++ b/lib/dsource_link.py @@ -0,0 +1,84 @@ +""" +Create an object to link MS SQL or ASE dSources +""" + +from delphixpy.v1_10_2.web import environment +from delphixpy.v1_10_2.web import group +from delphixpy.v1_10_2.web import sourceconfig +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import get_references + +VERSION = "v.0.3.003" + + +class DsourceLink: + """ + Base class for linking dSources + """ + + def __init__(self, dlpx_obj, dsource_name, db_passwd, db_user, dx_group, db_type): + """ + Attributes required for linking MS SQL or ASE dSources + :param dlpx_obj: A Delphix DDP session object + :type dlpx_obj: lib.get_session.GetSession + :param dsource_name: Name of the dsource + :type dsource_name: str + :param dx_group: Group name of where the dSource will reside + :type dx_group: str + :param db_passwd: Password of the db_user + :type db_passwd: str + :param db_user: Username of the dSource + :type db_user: str + :param db_type: dSource type. 
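For readers skimming the patch, here is a minimal, hedged sketch of how the exception classes added in lib/dlpx_exceptions.py above are meant to be consumed by calling scripts. The database name is a placeholder and an already-authenticated lib.get_session.GetSession object is assumed; find_obj_by_name is the lookup helper added later in this patch.

    from delphixpy.v1_10_2.web import database
    from lib import dlpx_exceptions
    from lib import get_references

    def report_db(dlpx_obj, db_name):
        """Look up a database by name, translating lookup failures into
        script-level messages instead of tracebacks."""
        try:
            return get_references.find_obj_by_name(
                dlpx_obj.server_session, database, db_name
            )
        except dlpx_exceptions.DlpxObjectNotFound:
            # Raised by the lookup helpers when no object matches db_name
            print(f'{db_name} was not found on this engine')
        except dlpx_exceptions.DlpxException as err:
            # Generic failure path used throughout these libraries
            print(f'Operation against {db_name} failed: {err}')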
mssql, sybase or oracle + :type db_type: str + """ + self.dlpx_obj = dlpx_obj + self.dx_group = dx_group + self.db_passwd = db_passwd + self.db_user = db_user + self.dsource_name = dsource_name + self.db_type = db_type + self.engine_name = list(dlpx_obj.dlpx_ddps)[0] + self.link_params = vo.LinkParameters() + self.srccfg_obj = None + + def dsource_prepare_link(self): + """ + Prepare the dsource object for linking + """ + self.link_params.name = self.dsource_name + if self.db_type.lower() == "oracle": + self.link_params.link_data = vo.OracleLinkData() + elif self.db_type.lower() == "sybase": + self.link_params.link_data = vo.ASELinkData() + elif self.db_type.lower() == "mssql": + self.link_params.link_data = vo.MSSqlLinkData() + self.link_params.group = get_references.find_obj_by_name( + self.dlpx_obj.server_session, group, self.dx_group + ).reference + self.link_params.link_data.db_credentials = vo.PasswordCredential() + self.link_params.link_data.db_credentials.password = self.db_passwd + self.link_params.link_data.db_user = self.db_user + # Create blank sourcing policy + self.link_params.link_data.sourcing_policy = vo.SourcingPolicy() + self.link_params.link_data.sourcing_policy.logsync_enabled = False + self.link_params.link_data.config = self.get_or_create_sourceconfig( + self.srccfg_obj + ) + return self.link_params + + def get_or_create_sourceconfig(self, sourceconfig_obj=None): + """ + Get current sourceconfig or create it + :param sourceconfig_obj: + :return: link_params + """ + try: + return get_references.find_obj_by_name( + self.dlpx_obj.server_session, sourceconfig, self.dsource_name + ).reference + except dlpx_exceptions.DlpxObjectNotFound: + self.link_params.link_data.config = sourceconfig.create( + self.dlpx_obj.server_session, sourceconfig_obj + ).reference diff --git a/lib/dsource_link_ase.py b/lib/dsource_link_ase.py new file mode 100644 index 0000000..95854cb --- /dev/null +++ b/lib/dsource_link_ase.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python3 +""" +Link an ASE Sybase dSource +""" +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import environment +from delphixpy.v1_10_2.web import repository +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import get_references +from lib.dsource_link import DsourceLink + +VERSION = "v.0.3.002" + + +class DsourceLinkASE(DsourceLink): + """ + Derived class implementing linking of a ASE Sybase dSource + """ + + def __init__( + self, dlpx_obj, dsource_name, db_passwd, db_user, dx_group, logsync, db_type + ): + """ + Constructor method + :param dlpx_obj: A Delphix DDP session object + :type dlpx_obj: lib.get_session.GetSession + :param dsource_name: Name of the dsource + :type dsource_name: str + :param dx_group: Group name of where the dSource will reside + :type dx_group: str + :param db_passwd: Password of the db_user + :type db_passwd: str + :param db_user: Username of the dSource + :type db_user: str + :param logsync: Enable logsync + :type logsync: bool + :param db_type: dSource type. 
mssql, sybase or oracle + :type db_type: str + """ + super().__init__(dlpx_obj, dsource_name, db_passwd, db_user, dx_group, db_type) + self.dlpx_obj = dlpx_obj + self.dsource_name = dsource_name + self.db_passwd = db_passwd + self.db_user = db_user + self.dx_group = dx_group + self.logsync = logsync + self.db_type = db_type + + def link_ase_dsource( + self, backup_path, bck_file, create_bckup, env_name, stage_repo + ): + """ + Link an ASE dSource + :param backup_path: Path to the ASE/MSSQL backups + :type backup_path: str + :param bck_file: Fully qualified name of backup file + :type bck_file: str + :param create_bckup: Create and ingest a new Sybase backup + :type create_bckup: str + :param env_name: Name of the environment where the dSource running + :type env_name: str + :param stage_repo: Stage repository name in Delphix + :type stage_repo: str + """ + link_params = super().dsource_prepare_link() + link_params.link_data.load_backup_path = backup_path + if bck_file: + link_params.link_data.sync_parameters = vo.ASESpecificBackupSyncParameters() + bck_files = bck_file.split(" ") + link_params.link_data.sync_parameters.backup_files = bck_files + elif create_bckup: + link_params.link_data.sync_parameters = vo.ASENewBackupSyncParameters() + else: + link_params.link_data.sync_parameters = vo.ASELatestBackupSyncParameters() + try: + env_user_ref = ( + link_params.link_data.stage_user + ) = get_references.find_obj_by_name( + self.dlpx_obj.server_session, environment, env_name + ).primary_user + link_params.link_data.staging_host_user = env_user_ref + link_params.link_data.source_host_user = env_user_ref + link_params.link_data.staging_repository = get_references.find_obj_by_name( + self.dlpx_obj.server_session, repository, stage_repo + ).reference + except dlpx_exceptions.DlpxException as err: + raise dlpx_exceptions.DlpxException( + f"Could not link {self.dsource_name}:\n{err}" + ) + try: + dsource_ref = database.link(self.dlpx_obj.server_session, link_params) + self.dlpx_obj.jobs[self.engine_name] = self.dlpx_obj.server_session.last_job + print(f"{dsource_ref} successfully linked {self.dsource_name}") + except (exceptions.RequestError, exceptions.HttpError) as err: + raise dlpx_exceptions.DlpxException( + f"Database link failed for {self.dsource_name}:\n{err}" + ) diff --git a/lib/dsource_link_mssql.py b/lib/dsource_link_mssql.py new file mode 100644 index 0000000..f3524d7 --- /dev/null +++ b/lib/dsource_link_mssql.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python3 +""" +Link a MSSQL dSource +""" +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import environment +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import get_references +from lib.dsource_link import DsourceLink + +VERSION = "v.0.3.001" + + +class DsourceLinkMssql(DsourceLink): + """ + Derived class implementing linking of a MSSQL dSource + """ + + def __init__( + self, + dlpx_obj, + dsource_name, + db_passwd, + db_user, + dx_group, + db_type, + logsync, + validated_sync_mode, + initial_load_type, + delphix_managed=False, + ): + """ + Constructor method + :param dlpx_obj: A Delphix DDP session object + :type dlpx_obj: lib.get_session.GetSession + :param dsource_name: Name of the dsource + :type dsource_name: str + :param dx_group: Group name of where the dSource will reside + :type dx_group: str + :param db_passwd: Password of the db_user + :type db_passwd: str + :param db_user: Username of the dSource + :type db_user: str + :param db_type: 
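A hedged sketch of driving lib/dsource_link_ase.py above from a script follows; the engine address, credentials, group, environment and repository names are placeholders rather than values from this patch, and a populated ./config/dxtools.conf plus a reachable engine are assumed.

    from lib import dsource_link_ase
    from lib import get_session

    dx_session_obj = get_session.GetSession()
    dx_session_obj.get_config()
    dx_session_obj.dlpx_session('10.0.1.10', 'delphix_admin', 'delphix')

    ase_link = dsource_link_ase.DsourceLinkASE(
        dlpx_obj=dx_session_obj,
        dsource_name='pubs3',
        db_passwd='sybase',
        db_user='sa',
        dx_group='Sources',
        logsync=True,
        db_type='sybase',
    )
    # No backup file and no new backup requested, so the
    # ASELatestBackupSyncParameters branch is taken.
    ase_link.link_ase_dsource(
        backup_path='/data/backups',
        bck_file=None,
        create_bckup=None,
        env_name='ase-stage-env',
        stage_repo='ASE157',
    )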
dSource type. mssql, sybase or oracle + :type db_type: str + """ + super().__init__(dlpx_obj, dsource_name, db_passwd, db_user, dx_group, db_type) + self.dlpx_obj = dlpx_obj + self.dsource_name = dsource_name + self.db_passwd = db_passwd + self.db_user = db_user + self.dx_group = dx_group + self.db_type = db_type + self.logsync = logsync + self.validated_sync_mode = validated_sync_mode + self.initial_load_type = initial_load_type + self.delphix_managed = delphix_managed + if delphix_managed: + self.initial_load_type = "COPY_ONLY" + + def get_or_create_mssql_sourcecfg( + self, + env_name, + db_install_path, + stage_env, + stage_instance, + backup_path, + backup_loc_passwd, + backup_loc_user, + ip_addr=None, + port_num=None, + backup_uuid=None, + ): + """ + Create the sourceconfig used for provisioning an MSSQL dSource + :param env_name: Name of the environment in Delphix + :type env_name: str + :param db_install_path: Path to where the Oracle binaries are installed + :type db_install_path: str + """ + env_obj = get_references.find_obj_by_name( + self.dlpx_obj.server_session, environment, env_name + ) + repo_ref = get_references.find_db_repo( + self.dlpx_obj.server_session, + "MSSqlInstance", + env_obj.reference, + db_install_path, + ) + + # source config for single instance MSSQL + sourcecfg_params = vo.MSSqlSIConfig() + sourcecfg_params.user = self.db_user + sourcecfg_params.credentials = vo.PasswordCredential() + sourcecfg_params.credentials.password = self.db_passwd + sourcecfg_params.database_name = self.dsource_name + # sourcecfg_params.unique_name = self.dsource_name + sourcecfg_params.repository = repo_ref + sourcecfg_params.environment_user = env_obj.primary_user + sourcecfg_params.recovery_model = "FULL" + self.link_mssql_dsource( + stage_env, + stage_instance, + backup_path, + backup_loc_passwd, + backup_loc_user, + backup_uuid, + ) + + def link_mssql_dsource( + self, + stage_env, + stage_instance, + backup_path, + backup_loc_passwd, + backup_loc_user, + uuid, + ): + """ + Link an MSSQL dSource + :param stage_env: Name of the staging environment + :type stage_env: str + :param stage_instance: Name if the staging database instance + :type stage_instance: str + :param backup_path: Directory of where the backup is located + :type backup_path: str + :param backup_loc_passwd: Password of the shared backup path + :type backup_loc_passwd: str + :param backup_loc_user: Username for the shared backup path + :type backup_loc_user: str + """ + link_params = super().dsource_prepare_link() + if self.delphix_managed: + link_params.link_data.ingestion_strategy = ( + vo.DelphixManagedBackupIngestionStrategy() + ) + link_params.link_data.ingestion_strategy.backup_policy = "PRIMARY" + link_params.link_data.ingestion_strategy.compression_enabled = False + else: + link_params.link_data.ingestion_strategy = ( + vo.ExternalBackupIngestionStrategy() + ) + link_params.link_data.ingestion_strategy.validated_sync_mode = ( + self.validated_sync_mode + ) + link_params.link_data.sourcing_policy = vo.SourcingPolicy() + link_params.link_data.sourcing_policy.logsync_enabled = False + if self.validated_sync_mode and self.validated_sync_mode == "TRANSACTION_LOG": + link_params.link_data.sourcing_policy.logsync_enabled = self.logsync + try: + env_obj_ref = get_references.find_obj_by_name( + self.dlpx_obj.server_session, environment, stage_env + ).reference + ppt_repo_ref = get_references.find_db_repo( + self.dlpx_obj.server_session, + "MSSqlInstance", + env_obj_ref, + stage_instance, + ) + 
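+            # Attach the staging (PPT) SQL Server instance repository found
+            # above to the link request before it is submitted.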
link_params.link_data.ppt_repository = ppt_repo_ref + except dlpx_exceptions.DlpxException as err: + raise dlpx_exceptions.DlpxException( + f"Could not link {self.dsource_name}:\n{err}" + ) + + # specifying backup locations + link_params.link_data.shared_backup_locations = [] + if backup_path and backup_path != "auto": + link_params.link_data.shared_backup_locations = backup_path.split(":") + if backup_loc_passwd: + link_params.link_data.backup_location_credentials = vo.PasswordCredential() + link_params.link_data.backup_location_credentials.password = ( + backup_loc_passwd + ) + link_params.link_data.backup_location_user = backup_loc_user + + # specify the initial sync Parameters + if self.initial_load_type and self.initial_load_type == "SPECIFIC": + link_params.link_data.sync_parameters = ( + vo.MSSqlExistingSpecificBackupSyncParameters() + ) + link_params.link_data.sync_parameters.backup_uuid = uuid + elif self.initial_load_type and self.initial_load_type == "COPY_ONLY": + link_params.link_data.sync_parameters = ( + vo.MSSqlNewCopyOnlyFullBackupSyncParameters() + ) + link_params.link_data.sync_parameters.backup_policy = "PRIMARY" + link_params.link_data.sync_parameters.compression_enabled = False + else: + link_params.link_data.sync_parameters = ( + vo.MSSqlExistingMostRecentBackupSyncParameters() + ) + + try: + database.link(self.dlpx_obj.server_session, link_params) + except ( + exceptions.HttpError, + exceptions.RequestError, + exceptions.JobError, + ) as err: + dlpx_exceptions.DlpxException( + f"Database link failed for {self.dsource_name}:{err}" + ) + self.dlpx_obj.jobs[self.dlpx_obj.server_session.address].append( + self.dlpx_obj.server_session.last_job + ) diff --git a/lib/dsource_link_oracle.py b/lib/dsource_link_oracle.py new file mode 100644 index 0000000..25fdac7 --- /dev/null +++ b/lib/dsource_link_oracle.py @@ -0,0 +1,144 @@ +""" +Create an object to link Oracle dSources +""" +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import environment +from delphixpy.v1_10_2.web import sourceconfig +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import get_references +from lib.dsource_link import DsourceLink + +VERSION = "v.0.3.003" + + +class DsourceLinkOracle(DsourceLink): + """ + Class for linking Oracle dSources + """ + + def __init__( + self, + dlpx_obj, + dsource_name, + db_passwd, + db_user, + dx_group, + logsync, + logsync_mode, + db_type, + ): + """ + Attributes required for linking MS SQL or ASE dSources + :param dlpx_obj: A Delphix DDP session object + :type dlpx_obj: lib.get_session.GetSession + :param dsource_name: Name of the dsource + :type dsource_name: str + :param dx_group: Group name of where the dSource will reside + :type dx_group: str + :param db_passwd: Password of the db_user + :type db_passwd: str + :param db_user: Username of the dSource + :type db_user: str + :param logsync: Enable logsync + :type logsync: bool + :param logsync_mode: logsync mode + :type logsync: str + :param db_type: dSource type. 
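A hedged usage sketch for the MSSQL linking class above; every address, name, path and credential below is a placeholder, and with initial_load_type left unset the most recent existing backup is ingested.

    from lib import dsource_link_mssql
    from lib import get_session

    dx_session_obj = get_session.GetSession()
    dx_session_obj.get_config()
    dx_session_obj.dlpx_session('10.0.1.10', 'delphix_admin', 'delphix')

    mssql_link = dsource_link_mssql.DsourceLinkMssql(
        dlpx_obj=dx_session_obj,
        dsource_name='AdventureWorks',
        db_passwd='sql_passwd',
        db_user='sa',
        dx_group='Sources',
        db_type='mssql',
        logsync=True,
        validated_sync_mode='TRANSACTION_LOG',
        initial_load_type=None,
        delphix_managed=False,
    )
    # Builds the single-instance sourceconfig parameters for the source
    # database and then links it through the staging instance.
    mssql_link.get_or_create_mssql_sourcecfg(
        env_name='mssql-source-env',
        db_install_path='MSSQLSERVER',
        stage_env='mssql-stage-env',
        stage_instance='MSSQLSERVER',
        backup_path='auto',
        backup_loc_passwd=None,
        backup_loc_user=None,
    )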
mssql, sybase or oracle + :type db_type: str + """ + super().__init__(dlpx_obj, dsource_name, db_passwd, db_user, dx_group, db_type) + self.dlpx_obj = dlpx_obj + self.dx_group = dx_group + self.db_passwd = db_passwd + self.db_user = db_user + self.dsource_name = dsource_name + self.db_type = db_type + self.logsync = logsync + self.logsync_mode = logsync_mode + + def get_or_create_ora_sourcecfg( + self, env_name, db_install_path, ip_addr, port_num=1521 + ): + """ + Create the sourceconfig used for provisioning an Oracle dSource + :param env_name: Name of the environment in Delphix + :type env_name: str + :param db_install_path: Path to where the Oracle binaries are installed + :type db_install_path: str + :param ip_addr: IP Address of the Delphix environment. Used for the + Oracle connection string + :type ip_addr: str + :param port_num: Port number of the Oracle Listener (1521 default) + :type port_num: int + """ + port_num = str(port_num) + try: + env_obj = get_references.find_obj_by_name( + self.dlpx_obj.server_session, environment, env_name + ) + repo_ref = get_references.find_db_repo( + self.dlpx_obj.server_session, + "OracleInstall", + env_obj.reference, + db_install_path, + ) + except dlpx_exceptions.DlpxObjectNotFound as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: Unable to find " f"reference to repository:{err}" + ) + sourcecfg_params = vo.OracleSIConfig() + connect_str = f"jdbc:oracle:thin:@{ip_addr}:{port_num}:" f"{self.dsource_name}" + sourcecfg_params.user = self.db_user + sourcecfg_params.environment_user = env_obj.primary_user + sourcecfg_params.credentials = vo.PasswordCredential() + sourcecfg_params.credentials.password = self.db_passwd + sourcecfg_params.database_name = self.dsource_name + sourcecfg_params.unique_name = self.dsource_name + sourcecfg_params.instance = vo.OracleInstance() + sourcecfg_params.instance.instance_name = self.dsource_name + sourcecfg_params.instance.instance_number = 1 + sourcecfg_params.services = vo.OracleService() + sourcecfg_params.repository = repo_ref + sourcecfg_params.jdbcConnectionString = connect_str + self.link_ora_dsource(env_obj.primary_user) + + def link_ora_dsource( + self, primary_user_ref, num_connections=5, files_per_set=5, rman_channels=2 + ): + """ + Link an Oracle dSource + :param primary_user_ref: Reference to the environment user + :type primary_user_ref: str + :param num_connections: Number of connections for Oracle RMAN + :type num_connections: int + :param files_per_set: Configures how many files per set for Oracle RMAN + :type files_per_set: int + :param rman_channels: Configures the number of Oracle RMAN Channels + :type rman_channels: int + :return: Reference of the linked dSource + """ + link_params = super().dsource_prepare_link() + link_params.link_data.sourcing_policy = vo.OracleSourcingPolicy() + link_params.link_data.compressedLinkingEnabled = True + link_params.link_data.environment_user = primary_user_ref + link_params.link_data.number_of_connections = int(num_connections) + link_params.link_data.link_now = True + link_params.link_data.files_per_set = int(files_per_set) + link_params.link_data.rman_channels = int(rman_channels) + link_params.link_data.sourcing_policy.logsync_enabled = self.logsync + link_params.link_data.sourcing_policy.logsync_mode = self.logsync_mode + try: + database.link(self.dlpx_obj.server_session, link_params) + self.dlpx_obj.jobs[self.dlpx_obj.server_session.address].append( + self.dlpx_obj.server_session.last_job + ) + except ( + exceptions.HttpError, + 
exceptions.RequestError, + exceptions.JobError, + ) as err: + dlpx_exceptions.DlpxException( + f"Database link failed for {self.dsource_name}:\n{err}" + ) diff --git a/lib/dx_logging.py b/lib/dx_logging.py new file mode 100644 index 0000000..e5d9426 --- /dev/null +++ b/lib/dx_logging.py @@ -0,0 +1,69 @@ +""" +Package DxLogging +""" + +import logging + +VERSION = "v.0.3.000" + + +def logging_est(logfile_path, debug=False): + """ + Establish Logging + + :param logfile_path: path to the logfile. Default: current directory. + :type logfile_path: str + :param debug: Set debug mode on (True) or off (False). + :type debug: bool + """ + logging.basicConfig( + filename=logfile_path, + format="%(levelname)s:%(asctime)s:%(message)s", + level=logging.INFO, + datefmt="%Y-%m-%d %H:%M:%S", + ) + logger = logging.getLogger() + if debug is True: + logger.setLevel(10) + print_info("Debug Logging is enabled.") + + +def print_debug(print_obj): + """ + Call this function with a log message to prefix the message with DEBUG + :param print_obj: Object to print to logfile and stdout + :type print_obj: type depends on objecting being passed. Typically str + """ + print(f"DEBUG: {str(print_obj)}") + logging.debug(str(print_obj)) + + +def print_info(print_obj): + """ + Call this function with a log message to prefix the message with INFO + :param print_obj: Object to print to logfile and stdout + :type print_obj: type depends on objecting being passed. Typically str + """ + print(f"INFO: {print_obj}") + logging.info(str(print_obj)) + + +def print_warning(print_obj): + """ + Call this function with a log message to prefix the message with INFO + :param print_obj: Object to print to logfile and stdout + :type print_obj: type depends on objecting being passed. Typically str + """ + print(f"WARN: {print_obj}") + logging.warning(str(print_obj)) + + +def print_exception(print_obj): + """ + Call this function with a log message to prefix the message with EXCEPTION + :param print_obj: Object to print to logfile and stdout + :type print_obj: type depends on objecting being passed. 
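A matching hedged sketch for the Oracle variant defined above; the engine address, ORACLE_HOME path, credentials and names are placeholders, not values taken from this patch.

    from lib import dsource_link_oracle
    from lib import get_session

    dx_session_obj = get_session.GetSession()
    dx_session_obj.get_config()
    dx_session_obj.dlpx_session('10.0.1.10', 'delphix_admin', 'delphix')

    ora_link = dsource_link_oracle.DsourceLinkOracle(
        dlpx_obj=dx_session_obj,
        dsource_name='orcl',
        db_passwd='delphixdb',
        db_user='delphixdb',
        dx_group='Sources',
        logsync=True,
        logsync_mode='ARCHIVE_REDO_MODE',
        db_type='oracle',
    )
    # Builds the OracleSIConfig for the source database, then links the
    # dSource with the environment's primary user and default RMAN settings.
    ora_link.get_or_create_ora_sourcecfg(
        env_name='oracle-source-env',
        db_install_path='/u01/app/oracle/product/19.0.0/dbhome_1',
        ip_addr='10.0.1.20',
    )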
Typically str + + """ + print(str(print_obj)) + logging.error("EXCEPTION: %s" % (str(print_obj))) diff --git a/lib/dx_timeflow.py b/lib/dx_timeflow.py new file mode 100644 index 0000000..3591238 --- /dev/null +++ b/lib/dx_timeflow.py @@ -0,0 +1,256 @@ +""" +List, create, destroy and refresh Delphix timeflows +""" + +import re +import sys +from distutils.version import LooseVersion + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2 import job_context +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import snapshot +from delphixpy.v1_10_2.web import timeflow +from delphixpy.v1_10_2.web import vo +from delphixpy.v1_10_2.web.timeflow import bookmark +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references + +VERSION = "v.0.3.002" + + +class DxTimeflow: + """ + Shared methods for timeflows + :param engine: A Delphix DDP session object + :type engine: delphixpy.v1_10_2.delphix_engine.DelphixEngine + """ + + def __init__(self, engine): + super().__init__() + self._engine = engine + + def get_timeflow_reference(self, db_name): + """ + :param db_name: The database name to retrieve current_timeflow + :type db_name: str + :return: current_timeflow reference for db_name + """ + db_lst = database.get_all(self._engine) + for db_obj in db_lst: + if db_obj.name == db_name: + return db_obj.current_timeflow + raise dlpx_exceptions.DlpxException( + f"Timeflow reference not " f"found for {db_name}." + ) + + def list_timeflows(self): + """ + Retrieve all timeflows for a given engine + :return: generator containing + delphixpy.v1_10_2.web.objects.OracleTimeflow.OracleTimeflow objects + """ + all_timeflows = timeflow.get_all(self._engine) + for tf_obj in all_timeflows: + try: + tf_obj.name = get_references.find_obj_name( + self._engine, database, tf_obj.container + ) + yield tf_obj + except TypeError as err: + raise dlpx_exceptions.DlpxException( + f"Listing Timeflows encountered an error:\n{err}" + ) + except ( + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + raise dlpx_exceptions.DlpxException(err) + + def create_bookmark(self, bookmark_name, db_name, timestamp=None, location=None): + """ + Create a timeflow bookmark + + :param bookmark_name: Bookmark's name + :type bookmark_name: str + :param db_name: The database name to create the bookmark + :type bookmark_name: str + :param timestamp: Timestamp for the bookmark. 
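The logging helpers above are typically initialised once at the top of each script; a small sketch, with the logfile name as a placeholder:

    from lib import dx_logging

    # Send messages to the logfile and to stdout; debug=True also drops the
    # logger to DEBUG level and prints a confirmation message.
    dx_logging.logging_est('dx_operations.log', debug=True)
    dx_logging.print_info('Starting the refresh job')
    dx_logging.print_warning('No engine specified; falling back to the default')
    try:
        raise RuntimeError('simulated failure')
    except RuntimeError as err:
        dx_logging.print_exception(f'Caught an error: {err}')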
+ :type timestamp: str Required format is (UTC/Zulu): + YYYY-MM-DDTHH:MM:SS.000Z + :param location: Location which is referenced by the bookmark + """ + tf_create_params = vo.TimeflowBookmarkCreateParameters() + tf_ref = self.get_timeflow_reference(db_name) + if re.search("ORAC", tf_ref, re.IGNORECASE): + tf_create_params.timeflow_point = vo.OracleTimeflowPoint() + elif re.search("MSSql", tf_ref, re.IGNORECASE): + tf_create_params.timeflow_point = vo.MSSqlTimeflowPoint() + elif re.search("ASE", tf_ref, re.IGNORECASE): + tf_create_params.timeflow_point = vo.ASETimeflowPoint() + tf_create_params.name = bookmark_name + tf_create_params.timeflow_point.timeflow = tf_ref + if timestamp is not None: + tf_create_params.timeflow_point.timestamp = timestamp + else: + tf_create_params.timeflow_point.location = location + try: + timeflow.bookmark.create(self._engine, tf_create_params) + except exceptions.RequestError as err: + raise dlpx_exceptions.DlpxException(err.error) + except (exceptions.JobError, exceptions.HttpError): + raise dlpx_exceptions.DlpxException( + f"Fatal exception caught while creating the Timeflow " + f"Bookmark:\n{sys.exc_info()[0]}\n" + ) + + def delete_bookmark(self, bookmark_name): + """ + Delete a Timeflow bookmark + :param bookmark_name: name of the TF bookmark to delete + :param bookmark_name: str + """ + tf_bookmark = get_references.find_obj_by_name( + self._engine, timeflow.bookmark, bookmark_name + ) + try: + timeflow.bookmark.bookmark.delete(self._engine, tf_bookmark.reference) + except exceptions.RequestError as err: + raise dlpx_exceptions.DlpxException(err.error) + except (exceptions.JobError, exceptions.HttpError): + raise dlpx_exceptions.DlpxException( + f"Fatal exception caught while creating the Timeflow " + f"Bookmark:\n{sys.exc_info()[0]}\n" + ) + + def list_tf_bookmarks(self): + """ + Return all Timeflow Bookmarks + :return: generator containing v1_10_2.web.vo.TimeflowBookmark objects + """ + all_bookmarks = timeflow.bookmark.get_all(self._engine) + for tfbm_obj in all_bookmarks: + try: + if tfbm_obj.timestamp is None: + tfbm_obj.timestamp = None + else: + tfbm_obj.timestamp = get_references.convert_timestamp( + self._engine, tfbm_obj.timestamp[:-5] + ) + tfbm_obj + except TypeError: + raise dlpx_exceptions.DlpxException( + f"No timestamp found " f"for {tfbm_obj.name}" + ) + except exceptions.RequestError as err: + dlpx_err = err.error + raise dlpx_exceptions.DlpxException(dlpx_err.action) + + def find_snapshot(self, snap_name): + """ + Method to find a snapshot by name + :param snap_name: Name of the snapshot + :type snap_name: str + :return: snapshot name + """ + snapshots = snapshot.get_all(self._engine) + for snapshot_obj in snapshots: + if str(snapshot_obj.name).startswith(snap_name): + return snapshot_obj.reference + elif str(snapshot_obj.latest_change_point.timestamp).startswith(snap_name): + return snapshot_obj.reference + + def set_timeflow_point( + self, container_obj, timestamp_type, timestamp="LATEST", timeflow_name=None + ): + """ + Returns the reference of the timestamp specified. + :param container_obj: Delphix object containing the + snapshot/timeflow to be provisioned + :type container_obj: + :py:class:`delphixpy.v1_10_2.web.objects.Container.Container` + object + :param timestamp_type: Type of timestamp - SNAPSHOT or TIME + :type timestamp_type: str + :param timestamp: Name of timestamp/snapshot. 
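A brief, hedged sketch of the bookmark workflow provided by the class above; the engine address, VDB name, bookmark name and timestamp are placeholders and a populated ./config/dxtools.conf is assumed.

    from lib import get_session
    from lib.dx_timeflow import DxTimeflow

    dx_session_obj = get_session.GetSession()
    dx_session_obj.get_config()
    dx_session_obj.dlpx_session('10.0.1.10', 'delphix_admin', 'delphix')

    dx_tf = DxTimeflow(dx_session_obj.server_session)
    # Bookmark the current timeflow of 'my_vdb' at a UTC/Zulu timestamp
    dx_tf.create_bookmark(
        'pre_release_bm', 'my_vdb', timestamp='2022-10-01T12:00:00.000Z'
    )
    # ...and remove the bookmark once it is no longer needed
    dx_tf.delete_bookmark('pre_release_bm')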
Default: Latest + :type timestamp: str + :param timeflow_name: Name of the timeflow + :type timeflow_name: TimeflowPointTimestamp + :return: one of the following types depending on timeflow required + TimeflowPointParameters + TimeflowPointSnapshot + TimeflowPointSemantic + """ + timeflow_point_parameters = None + if timestamp_type.upper() == "SNAPSHOT": + if timestamp.upper() == "LATEST": + timeflow_point_parameters = vo.TimeflowPointSemantic() + timeflow_point_parameters.container = container_obj.reference + timeflow_point_parameters.location = "LATEST_SNAPSHOT" + elif timestamp: + snapshot_obj = self.find_snapshot_object(timestamp) + if snapshot_obj: + timeflow_point_parameters = vo.TimeflowPointTimestamp() + timeflow_point_parameters.timeflow = snapshot_obj.timeflow + timeflow_point_parameters.timestamp = ( + snapshot_obj.latest_change_point.timestamp + ) + elif snapshot_obj is None: + raise dlpx_exceptions.DlpxException( + f"Unable to find a suitable time for {timestamp}" + f" for database {container_obj.name}" + ) + elif timestamp_type.upper() == "TIME": + if timestamp.upper() == "LATEST": + timeflow_point_parameters = vo.TimeflowPointSemantic() + timeflow_point_parameters.container = container_obj.reference + timeflow_point_parameters.location = "LATEST_POINT" + elif timestamp: + timeflow_point_parameters = vo.TimeflowPointTimestamp() + timeflow_obj = get_references.find_obj_by_reference( + self._engine, timeflow, container_obj.current_timeflow + ) + timeflow_point_parameters.timeflow = timeflow_obj.reference + timeflow_point_parameters.timestamp = timestamp + else: + raise dlpx_exceptions.DlpxObjectNotFound( + f"Timestamp type {timestamp_type} not found for VDB " + f"{container_obj}. Valid types are snapshot or time." + ) + return timeflow_point_parameters + + def refresh_vdb_tf_bookmark(self, vdb_name, tf_bookmark_name): + """ + Refreshes a VDB from a Timeflow Bookmark + :param vdb_name: Name of the VDB + :type vdb_name: str + :param tf_bookmark_name: Name of the Timeflow Bookmark + :type tf_bookmark_name: str + :return: str reference to the refresh job + """ + try: + vdb_obj = get_references.find_obj_by_name(self._engine, database, vdb_name) + tf_bookmark_obj = get_references.find_obj_by_name( + self._engine, timeflow.bookmark, tf_bookmark_name + ) + except StopIteration as err: + raise dlpx_exceptions.DlpxObjectNotFound(err) + if "ORACLE" in vdb_obj.reference: + tf_params = vo.OracleRefreshParameters() + else: + tf_params = vo.RefreshParameters() + tf_params.timeflow_point_parameters = vo.TimeflowPointBookmark() + tf_params.timeflow_point_parameters.bookmark = tf_bookmark_obj.reference + try: + with job_context.asyncly(self._engine): + database.refresh(self._engine, vdb_obj.reference, tf_params) + return self._engine.last_job + except exceptions.RequestError as err: + raise dlpx_exceptions.DlpxException(err.error.action) + except (exceptions.JobError, exceptions.HttpError) as err: + dx_logging.print_exception( + f"Exception caught during refresh:\n{sys.exc_info()[0]}" + ) + raise dlpx_exceptions.DlpxException(err.error) diff --git a/lib/get_references.py b/lib/get_references.py new file mode 100644 index 0000000..fc662dc --- /dev/null +++ b/lib/get_references.py @@ -0,0 +1,294 @@ +""" +Module that provides lookups of references and names of Delphix objects. 
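Closing out the timeflow module above, a hedged sketch of refreshing a VDB from an existing bookmark; the names and address are again placeholders.

    from lib import get_session
    from lib.dx_timeflow import DxTimeflow

    dx_session_obj = get_session.GetSession()
    dx_session_obj.get_config()
    dx_session_obj.dlpx_session('10.0.1.10', 'delphix_admin', 'delphix')

    dx_tf = DxTimeflow(dx_session_obj.server_session)
    # Refresh 'my_vdb' from the bookmark and report the resulting job reference
    job_ref = dx_tf.refresh_vdb_tf_bookmark('my_vdb', 'pre_release_bm')
    print(f'Refresh submitted as job {job_ref}')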
+""" + +from datetime import datetime + +from dateutil import tz + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import group +from delphixpy.v1_10_2.web import job +from delphixpy.v1_10_2.web import repository +from delphixpy.v1_10_2.web import source +from delphixpy.v1_10_2.web import sourceconfig +from delphixpy.v1_10_2.web import vo +from delphixpy.v1_10_2.web.service import time +from lib import dlpx_exceptions + +VERSION = "v.0.3.006" + + +def convert_timestamp(engine, timestamp): + """ + Convert timezone from Zulu/UTC to the Engine's timezone + :param engine: A Delphix engine session object + :type engine: lib.get_session.GetSession object + :param timestamp: the timstamp in Zulu/UTC to be converted + :type timestamp: str + :return: Timestamp converted localtime + """ + + default_tz = tz.gettz("UTC") + engine_tz = time.time.get(engine) + try: + convert_tz = tz.gettz(engine_tz.system_time_zone) + utc = datetime.strptime(timestamp, "%Y-%m-%dT%H:%M:%S") + utc = utc.replace(tzinfo=default_tz) + converted_tz = utc.astimezone(convert_tz) + engine_local_tz = ( + f"{str(converted_tz.date())} " + f"{str(converted_tz.time())} {str(converted_tz.tzname())}" + ) + return engine_local_tz + except TypeError: + return None + + +def get_running_job(engine, object_ref): + """ + Function to find a running job from the DB target reference. + :param engine: A Delphix DDP session object + :type engine: lib.GetSession.GetSession object + :param object_ref: Reference to the object of the running job + :type object_ref: str + :return: reference of the running job(s) + """ + try: + return job.get_all(engine, target=object_ref, job_state="RUNNING")[0].reference + except IndexError: + return None + + +def find_obj_by_name(engine, f_class, obj_name): + """ + Function to find objects by name and object class + :param engine: A Delphix DDP session object + :type engine: lib.GetSession.GetSession object + :param f_class: The objects class. I.E. database or timeflow. + :type f_class: Supported class type by Delphix + :param obj_name: The name of the object + :type obj_name: str + :return: object of f_class type + """ + obj_list = f_class.get_all(engine) + for obj in obj_list: + if obj.name == obj_name: + return obj + raise dlpx_exceptions.DlpxObjectNotFound(f"Object {obj_name} not found.") + + +def find_obj_by_reference(engine, f_class, reference): + """ + Function to find objects by reference and object class + :param engine: A Delphix DDP session object + :type engine: lib.GetSession.GetSession object + :param f_class: The objects class. I.E. database or timeflow. + :type f_class: Supported class type by Delphix + :param obj_name: The refere ce of the object + :type reference: str + :return: object of f_class type + """ + obj_list = f_class.get_all(engine) + for obj in obj_list: + if obj.reference == reference: + return obj + raise dlpx_exceptions.DlpxObjectNotFound( + f"Object with reference {reference} not found." 
+ ) + + +def find_source_by_db_name(engine, obj_name): + """ + Function to find sources by database name and object class, and return + object's reference as a string + :param engine: A Delphix DDP session object + :type engine: lib.GetSession.GetSession object + :param obj_name: The name of the database object in Delphix + :type obj_name: str + :return: The parent DB object + """ + for obj in database.get_all(engine): + if obj.name == obj_name: + source_obj = source.get_all(engine, database=obj.reference) + return source_obj[0] + raise dlpx_exceptions.DlpxObjectNotFound( + f"{obj_name} was not found on " f"engine {engine.address}.\n" + ) + + +def find_obj_name(engine, f_class, obj_reference): + """ + Return the obj name from obj_reference + + :param engine: A Delphix DDP Session object + :type engine: lib.GetSession.GetSession object + :param f_class: The objects class. I.E. database or timeflow + :type f_class: Supported class type by Delphix + :param obj_reference: The object reference to retrieve the name + :type obj_reference: str + :return: str object name + """ + try: + obj_name = f_class.get(engine, obj_reference) + return obj_name.name + except (exceptions.RequestError, exceptions.JobError, exceptions.HttpError) as err: + raise dlpx_exceptions.DlpxException(err) + + +def find_db_repo(engine, install_type, f_environment_ref, f_install_path): + """ + Function to find database repository objects by environment reference and + install path, and return the object's reference as a string + You might use this function to find Oracle and PostGreSQL database repos. + :param engine: A Delphix DDP session object + :type engine: lib.GetSession.GetSession object + :param install_type: Type of install - Oracle, or MSSQL + :type install_type: str + :param f_environment_ref: Reference of the environment for the repository + :type f_install_path: str + :param f_install_path: Path to the installation directory. + :type f_install_path: str + :return: delphixpy.web.vo.SourceRepository object + """ + for obj in repository.get_all(engine, environment=f_environment_ref): + if install_type == "OracleInstall": + if install_type == obj.type and obj.installation_home == f_install_path: + return obj.reference + elif install_type == "MSSqlInstance": + if obj.type == install_type and obj.instance_name == f_install_path: + return obj.reference + elif install_type == "AppDataRepository": + if obj.type == install_type and obj.instance_name == f_install_path: + return obj.reference + else: + raise dlpx_exceptions.DlpxException( + f"Only OracleInstall, AppDataRepository or MSSqlInstance " + f"types are supported.\n" + ) + + +def find_sourceconfig(engine, sourceconfig_name, f_environment_ref): + """ + Function to find database sourceconfig objects by environment reference, + sourceconfig name (db name) and return the object + You might use this function to find Oracle and PostGreSQL database + sourceconfigs. + :param engine: A Delphix DDP session object + :type engine: lib.get_session.GetSession object + :param sourceconfig_name: Name of source config, usually name of db + instance (i.e. orcl) + :type sourceconfig_name: str + :param f_environment_ref: Reference of the environment for the repository + :return: Type is determined by sourceonfig. 
Found in delphixpy.web.objects + """ + for obj in sourceconfig.get_all(engine, environment=f_environment_ref): + if obj.name == sourceconfig_name: + return obj + raise dlpx_exceptions.DlpxObjectNotFound( + f"No sourceconfig match found for type {sourceconfig_name}.\n" + ) + + +def find_all_databases_by_group(engine, group_name, exclude_js_container=False): + """ + Easy way to quickly find databases by group name + :param engine: A Delphix DDP session object + :type engine: lib.get_session.GetSession object + :param group_name: Name of the group for the database + :type group_name: str + :param exclude_js_container: If set to true, search self-service + containers + :type exclude_js_container: bool + :return: list of :py:class:`delphixpy.web.vo.Container` + """ + # First search groups for the name specified and return its reference + group_ref = find_obj_by_name(engine, group, group_name).reference + if group_ref: + databases = database.get_all( + engine, group=group_ref, no_js_container_data_source=exclude_js_container + ) + return databases + raise dlpx_exceptions.DlpxObjectNotFound( + f"No databases found in " f"group {group_name}.\n" + ) + + +def find_source_by_database(engine, database_obj): + """ + The source tells us if the database is enabled/disabled, virtual, + vdb/dSource, or is a staging database. + :param engine: Delphix DDP Session object + :type engine: lib.get_session.GetSession object + :param database_obj: Delphix database object + :type database_obj: delphixpy.web.vo.Container + """ + source_obj = source.get_all(engine, database=database_obj.reference) + # We'll just do a little sanity check here to ensure we only have a + # 1:1 result. + if not source_obj: + raise dlpx_exceptions.DlpxObjectNotFound( + f'{engine["hostname"]}: Did not find a source for ' f"{database_obj.name}." + ) + elif len(source_obj) > 1: + raise dlpx_exceptions.DlpxException( + f'{engine["hostname"]} More than one source returned for ' + f"{database_obj.name}" + ) + return source_obj + + +def build_data_source_params(dlpx_obj, obj, data_source): + """ + Builds the datasource parameters + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param obj: object type to use when finding db + :type obj: Type of object to build DS params + :param data_source: Name of the database to use when building the + parameters + :type data_source: str + """ + ds_params = vo.JSDataSourceCreateParameters() + ds_params.source = vo.JSDataSource() + ds_params.source.name = data_source + try: + db_obj = find_obj_by_name(dlpx_obj.server_session, obj, data_source) + ds_params.container = db_obj.reference + return ds_params + except exceptions.RequestError as err: + raise dlpx_exceptions.DlpxObjectNotFound( + f"\nCould not find {data_source}\n{err}" + ) + + +def find_all_objects(engine, f_class): + """ + Return all objects from a given class + :param engine: A Delphix engine session object + :type engine: lib.GetSession.GetSession object + :param f_class: The objects class. I.E. database or timeflow. 
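Most scripts in this repository lean on the helpers above for name-to-reference lookups; a hedged sketch follows, with the group and VDB names as placeholders.

    from delphixpy.v1_10_2.web import database
    from lib import get_references
    from lib import get_session

    dx_session_obj = get_session.GetSession()
    dx_session_obj.get_config()
    dx_session_obj.dlpx_session('10.0.1.10', 'delphix_admin', 'delphix')
    engine = dx_session_obj.server_session

    # Resolve a named VDB to its container object and API reference
    vdb_obj = get_references.find_obj_by_name(engine, database, 'my_vdb')
    print(vdb_obj.reference)
    # List every database container in a group; self-service data sources
    # are included because exclude_js_container defaults to False
    for db_obj in get_references.find_all_databases_by_group(engine, 'Sources'):
        print(db_obj.name, db_obj.reference)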
+ :return: list + """ + try: + return f_class.get_all(engine) + except (exceptions.JobError, exceptions.HttpError) as err: + raise dlpx_exceptions.DlpxException( + f"{engine.address} Error encountered in {f_class}: {err}\n" + ) + + +def find_obj_list(obj_lst, obj_name): + """ + Function to find an object in a list of objects + :param obj_lst: List containing objects from the get_all() method + :type obj_lst: list + :param obj_name: Name of the object to match + :type obj_name: str + :return: The named object, otherwise, DlpxObjectNotFound + """ + for obj in obj_lst: + if obj_name == obj.name: + return obj + raise dlpx_exceptions.DlpxObjectNotFound(f"Did not find {obj_name}\n") diff --git a/lib/get_session.py b/lib/get_session.py new file mode 100644 index 0000000..4f548c8 --- /dev/null +++ b/lib/get_session.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python3 +# Corey Brune - Oct 2016 +# This class handles the config file and authentication to a DDP +# requirements +# pip install docopt delphixpy + +"""This module takes the conf file for DDP(s) and returns an authentication + object +""" + +import json +import os +import ssl +from time import sleep + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2 import job_context +from delphixpy.v1_10_2 import web +from delphixpy.v1_10_2.delphix_engine import DelphixEngine +from lib import dlpx_exceptions +from lib import dx_logging + +VERSION = "v.0.3.001" + + +class GetSession: + """ + Class to read configuration and returns an Delphix session object + """ + + def __init__(self): + self.server_session = None + self.dlpx_ddps = {} + self.jobs = {} + + def get_config(self, config_file_path="./config/dxtools.conf"): + """ + This method reads in the dxtools.conf file + + :param config_file_path: path to the configuration file. + :type config_file_path: str + :return: dict containing engine information + """ + # First test to see that the file is there and we can open it + try: + with open(config_file_path) as config_file: + config = json.loads(config_file.read()) + except IOError: + raise dlpx_exceptions.DlpxException( + f"\nERROR: Was unable to open {config_file_path}. Please " + f"check the path and permissions, and try again.\n" + ) + except (ValueError, TypeError, AttributeError) as err: + raise dlpx_exceptions.DlpxException( + f"\nERROR: Was unable to read {config_file_path} as json. " + f"Please check if the file is in a json format and try " + f"again.\n {err}" + ) + for each in config.keys(): + temp_config = config[each].pop() + use_https = temp_config["use_https"] + if use_https and use_https.lower() == "true": + temp_config["use_https"] = True + else: + temp_config["use_https"] = False + self.dlpx_ddps[each] = temp_config + + def dlpx_session( + self, + f_engine_address, + f_engine_username, + f_engine_password=None, + enable_https=True, + ): + """ + Method to setup the session with DDP + :param f_engine_address: The DDP's address (IP/DNS Name) + :type f_engine_address: str + :param f_engine_username: Username to authenticate + :type f_engine_username: str + :param f_engine_password: User's password + :type f_engine_password: str + :param enable_https: Enable or disable HTTPS + :type enable_https: bool + :return: delphixpy.v1_10_2.delphix_engine.DelphixEngine object + """ + f_engine_namespace = "DOMAIN" + # Remove the next 3 lines if using in a production environment. 
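+        # These lines disable TLS certificate verification so that engines
+        # presenting self-signed certificates can still be reached over HTTPS.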
+ if not os.environ.get("PYTHONHTTPSVERIFY", "") and getattr( + ssl, "_create_unverified_context", None + ): + ssl._create_default_https_context = ssl._create_unverified_context + try: + self.server_session = DelphixEngine( + f_engine_address, + f_engine_username, + f_engine_password, + f_engine_namespace, + enable_https, + ) + self.server_wait() + except ( + exceptions.HttpError, + exceptions.RequestError, + exceptions.JobError, + ) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: An error occurred while authenticating to " + f"{f_engine_address}:\n {err}\n" + ) + except (TimeoutError) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: Timeout while authenticating to " + f"{f_engine_address}:\n {err}\n" + ) + + def job_mode(self, single_thread=True): + """ + This method tells the jobs to run sync or async, based on the + single_thread variable + :param single_thread: Execute application synchronously (True) or + async (False) + Default: True + :type single_thread: Bool + :return: contextlib._GeneratorContextManager + """ + # Synchronously + if single_thread: + return job_context.sync(self.server_session) + # Or asynchronously + elif single_thread is False: + return job_context.asyncly(self.server_session) + + def job_wait(self): + """ + This job stops all work in the thread/process until jobs are + completed. + """ + # Grab all the jos on the server (the last 25, be default) + all_jobs = web.job.get_all(self.server_session) + # For each job in the list, check to see if it is running (not ended) + for job_obj in all_jobs: + if not (job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]): + dx_logging.print_debug( + f"\nDEBUG: Waiting for {job_obj.reference} " + f"(currently: {job_obj.job_state}) to finish running " + f"against the container.\n" + ) + # If so, wait + job_context.wait(self.server_session, job_obj.reference) + + def server_wait(self): + """ + This job waits for a successful connection to DDP. + """ + while True: + try: + web.system.get(self.server_session) + break + except (exceptions.HttpError, exceptions.RequestError): + pass + dx_logging.print_info("Waiting for Delphix DDP to be ready") + sleep(3) diff --git a/lib/run_async.py b/lib/run_async.py new file mode 100644 index 0000000..c6326d9 --- /dev/null +++ b/lib/run_async.py @@ -0,0 +1,33 @@ +def run_async(func): + """ + http://code.activestate.com/recipes/576684-simple-threading-decorator/ + run_async(func) + function decorator, intended to make "func" run in a separate + thread (asynchronously). + Returns the created Thread object + + E.g.: + @run_async + def task1(): + do_something + + @run_async + def task2(): + do_something_too + + t1 = task1() + t2 = task2() + ... + t1.join() + t2.join() + """ + from threading import Thread + from functools import wraps + + @wraps(func) + def async_func(*args, **kwargs): + func_hl = Thread(target=func, args=args, kwargs=kwargs) + func_hl.start() + return func_hl + + return async_func diff --git a/lib/run_job.py b/lib/run_job.py new file mode 100644 index 0000000..c438ab0 --- /dev/null +++ b/lib/run_job.py @@ -0,0 +1,255 @@ +""" +Runs jobs passing a function as an argument. Thread safe. 
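# --- Illustrative config sketch (annotation, not part of the patch) -----------
# The functions in this module iterate dx_obj.dlpx_ddps, which
# GetSession.get_config() (lib/get_session.py above) loads from
# ./config/dxtools.conf: a JSON document with one key per engine, each mapping
# to a single-element list of settings. The field names below are the ones the
# code reads (ip_address, username, password, hostname, use_https, default);
# the engine name and values are hypothetical.
EXAMPLE_DXTOOLS_CONF = """
{
    "myengine": [{
        "hostname": "myengine",
        "ip_address": "delphix.example.com",
        "username": "admin_user",
        "password": "changeme",
        "use_https": "true",
        "default": "true"
    }]
}
"""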
+""" +import time + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import job +from lib import dlpx_exceptions +from lib import dx_logging + +VERSION = "v.0.3.004" + + +def run_job(main_func, dx_obj, engine="default", single_thread=True): + """ + This method runs the main_func asynchronously against all the + delphix engines specified + :param main_func: function to run against the DDP(s). + In these examples, it's main_workflow(). + :type main_func: function + :param dx_obj: Delphix session object from config + :type dx_obj: lib.get_session.GetSession object + :param engine: name of an engine, all or None + :type engine: str + :param single_thread: Run as single thread (True) or + multiple threads (False) + :type single_thread: bool + """ + threads = [] + # if engine ="all", run against every engine in config_file + if engine == "all": + dx_logging.print_info(f"Executing against all Delphix Engines") + try: + for delphix_ddp in dx_obj.dlpx_ddps: + t = main_func(dx_obj.dlpx_ddps[delphix_ddp], dx_obj, single_thread) + threads.append(t) + # TODO: Revisit threading logic + # This sleep has been tactically added to prevent errors in the parallel + # processing of operations across multiple engines + time.sleep(1) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception(f"Error encountered in run_job():{err}") + elif engine == "default": + try: + for delphix_ddp in dx_obj.dlpx_ddps.keys(): + if dx_obj.dlpx_ddps[delphix_ddp]["default"] == "True": + dx_obj_default = dx_obj + dx_obj_default.dlpx_ddps = { + delphix_ddp: dx_obj.dlpx_ddps[delphix_ddp] + } + dx_logging.print_info("Executing against default" "Delphix Engine") + t = main_func(dx_obj.dlpx_ddps[delphix_ddp], dx_obj, single_thread) + threads.append(t) + break + except TypeError as err: + raise dlpx_exceptions.DlpxException(f"Error in run_job: {err}") + else: + # Test to see if the engine exists in config_file + try: + engine_ref = dx_obj.dlpx_ddps[engine] + t = main_func(engine_ref, dx_obj, single_thread) + threads.append(t) + dx_logging.print_info( + f"Executing against Delphix Engine: " f'{engine_ref["ip_address"]}' + ) + except (exceptions.RequestError, KeyError): + raise dlpx_exceptions.DlpxException( + f"\nERROR: Delphix DDP {engine} cannot be found. Please " + f"check your input and try again." + ) + if engine is None: + raise dlpx_exceptions.DlpxException(f"ERROR: No default Delphix " f"DDP found.") + return threads + + +def run_job_mt(main_func, dx_obj, engine="default", single_thread=True): + """ + This method runs the main_func asynchronously against all the + delphix engines specified + :param main_func: function to run against the DDP(s). + In these examples, it's main_workflow(). 
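# --- Illustrative driver sketch (annotation, not part of the patch) -----------
# run_job()/run_job_mt() expect main_func to be wrapped with
# lib.run_async.run_async, so each call returns a threading.Thread which the
# caller then joins. This mirrors the main() functions of the ss_*.py scripts
# later in this patch; the workflow body is omitted and the config path is the
# repository default.
from lib import get_session
from lib import run_job
from lib.run_async import run_async


@run_async
def main_workflow(engine, dx_obj, single_thread):
    """Per-engine work would go here (see the ss_*.py scripts below)."""


def drive_all_engines():
    dx_obj = get_session.GetSession()
    dx_obj.get_config("./config/dxtools.conf")
    for thread in run_job.run_job_mt(
        main_workflow, dx_obj, engine="all", single_thread=True
    ):
        thread.join()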
+ :type main_func: function + :param dx_obj: Delphix session object from config + :type dx_obj: lib.get_session.GetSession object + :param engine: name of an engine, all or None + :type engine: str + :param single_thread: Run as single thread (True) or + multiple threads (False) + :type single_thread: bool + """ + threads = [] + # if engine ="all", run against every engine in config_file + if engine == "all": + dx_logging.print_info(f"Executing against all Delphix Engines") + try: + for delphix_ddp in dx_obj.dlpx_ddps: + engine_ref = dx_obj.dlpx_ddps[delphix_ddp] + dx_obj.jobs[engine_ref["ip_address"]] = [] + t = main_func(dx_obj.dlpx_ddps[delphix_ddp], dx_obj, single_thread) + threads.append(t) + # TODO: Revisit threading logic + # This sleep has been tactically added to prevent errors in the parallel + # processing of operations across multiple engines + time.sleep(2) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception(f"Error encountered in run_job():\n{err}") + raise err + elif engine == "default": + try: + for delphix_ddp in dx_obj.dlpx_ddps.keys(): + is_default = dx_obj.dlpx_ddps[delphix_ddp]["default"] + if is_default and is_default.lower() == "true": + dx_obj_default = dx_obj + dx_obj_default.dlpx_ddps = { + delphix_ddp: dx_obj.dlpx_ddps[delphix_ddp] + } + engine_ref = dx_obj.dlpx_ddps[delphix_ddp] + dx_obj.jobs[engine_ref["ip_address"]] = [] + dx_logging.print_info(f"Executing against default Delphix Engine") + t = main_func(dx_obj.dlpx_ddps[delphix_ddp], dx_obj, single_thread) + threads.append(t) + break + except TypeError as err: + raise dlpx_exceptions.DlpxException(f"Error in run_job: {err}") + except (dlpx_exceptions.DlpxException) as e: + dx_logging.print_exception(f"Error in run_job():\n{e}") + raise e + else: + # Test to see if the engine exists in config_file + try: + engine_ref = dx_obj.dlpx_ddps[engine] + dx_obj.jobs[engine_ref["ip_address"]] = [] + t = main_func(engine_ref, dx_obj, single_thread) + threads.append(t) + dx_logging.print_info( + f"Executing against Delphix Engine: " f'{engine_ref["ip_address"]}' + ) + except (exceptions.RequestError, KeyError): + raise dlpx_exceptions.DlpxException( + f"\nERROR: Delphix DDP {engine} cannot be found. Please " + f"check your input and try again." + ) + except (dlpx_exceptions.DlpxException) as e: + dx_logging.print_exception(f"Error in run_job():\n{e}") + raise e + if engine is None: + raise dlpx_exceptions.DlpxException(f"ERROR: No default Delphix " f"DDP found.") + return threads + + +def track_running_jobs(engine, dx_obj, poll=10): + """ + Retrieves running job state + :param engine: Dictionary containing info on the DDP (IP, username, etc.) 
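# --- Illustrative job-tracking sketch (annotation, not part of the patch) -----
# track_running_jobs() polls the references stored in
# dx_obj.jobs[engine["ip_address"]]: run_job_mt() seeds that list, each
# asynchronous API call appends server_session.last_job to it, and this
# function then waits for COMPLETED/CANCELED/FAILED. A condensed version of
# the pattern the ss_*.py scripts below use (the branch name is hypothetical):
from delphixpy.v1_10_2.web import selfservice
from lib import get_references
from lib import run_job


def activate_and_wait(engine, dlpx_obj, branch_name):
    branch_ref = get_references.find_obj_by_name(
        dlpx_obj.server_session, selfservice.branch, branch_name
    ).reference
    selfservice.branch.activate(dlpx_obj.server_session, branch_ref)
    dlpx_obj.jobs[engine["ip_address"]].append(
        dlpx_obj.server_session.last_job
    )
    run_job.track_running_jobs(engine, dlpx_obj, poll=10)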
+ :param poll: How long to sleep between querying jobs + :param dx_obj: Delphix session object from config + :type dx_obj: lib.get_session.GetSession object + :type poll: int + :return: + """ + # get all the jobs, then inspect them + dx_logging.print_info(f'checking running jobs on engine: {engine["hostname"]}') + engine_running_jobs = dx_obj.jobs[engine["ip_address"]] + while engine_running_jobs: + for j in engine_running_jobs: + job_obj = job.get(dx_obj.server_session, j) + if job_obj.job_state in ["COMPLETED"]: + engine_running_jobs.remove(j) + dx_logging.print_info( + f'Engine: {engine["hostname"]}: {job_obj.reference} is 100% COMPLETE' + ) + elif job_obj.job_state in ["CANCELED", "FAILED"]: + engine_running_jobs.remove(j) + dx_logging.print_info( + f'Engine: {engine["hostname"]}: {job_obj.reference} was CANCELLED or FAILED due to an error' + ) + # raise dlpx_exceptions.DlpxException('Job {job_obj.job_id} {job_obj.job_state}') + elif job_obj.job_state in "RUNNING": + dx_logging.print_info( + f'Engine: {engine["hostname"]}: {job_obj.reference} is RUNNING and {job_obj.percent_complete}% complete ' + ) + if dx_obj.jobs: + time.sleep(poll) + + +def find_job_state(engine, dx_obj, poll=5): + """ + Retrieves running job state + :param engine: Dictionary containing info on the DDP (IP, username, etc.) + :param poll: How long to sleep between querying jobs + :param dx_obj: Delphix session object from config + :type dx_obj: lib.get_session.GetSession object + :type poll: int + :return: + """ + # get all the jobs, then inspect them + dx_logging.print_info(f"Checking running jobs state") + i = 0 + for j in dx_obj.jobs.keys(): + print(len(dx_obj.jobs), j) + job_obj = job.get(dx_obj.server_session, dx_obj.jobs[j]) + dx_logging.print_info( + f'{engine["ip_address"]}: Running job: ' f"{job_obj.job_state}" + ) + if job_obj.job_state in ["CANCELED", "COMPLETED", "FAILED"]: + # If the job is in a non-running state, remove it + # from the running jobs list. + del dx_obj.jobs[j] + if len(dx_obj.jobs) == 0: + break + elif job_obj.job_state in "RUNNING": + # If the job is in a running state, increment the + # running job count. + i += 1 + dx_logging.print_info(f'{engine["ip_address"]}: {i} jobs running.') + # If we have running jobs, pause before repeating the + # checks. + if dx_obj.jobs: + time.sleep(poll) + else: + dx_logging.print_info(f"No jobs running") + break + + +def find_job_state_by_jobid(engine, dx_obj, job_id, poll=20): + """ + Retrieves running job state + :param engine: Dictionary containing info on the DDP (IP, username, etc.) + :param poll: How long to sleep between querying jobs + :param dx_obj: Delphix session object from config + :type dx_obj: lib.get_session.GetSession object + :param job_id: Job ID to check the state + :type poll: int + :return: + """ + # get the job object + job_obj = job.get(dx_obj.server_session, job_id) + dx_logging.print_debug(job_obj) + dx_logging.print_info(f" Waiting for : {job_id} to finish") + while job_obj.job_state == "RUNNING": + time.sleep(poll) + job_obj = job.get(dx_obj.server_session, job_id) + dx_logging.print_info(f"Job: {job_id} completed with status: {job_obj.job_state}") + return job_obj.job_state + + +def time_elapsed(time_start): + """ + This function calculates the time elapsed since the beginning of the script. + Call this anywhere you want to note the progress in terms of time + + :param time_start: start time of the script. 
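# --- Illustrative sketch (annotation, not part of the patch) ------------------
# find_job_state_by_jobid() above blocks on one specific job reference, which
# is useful when a caller has just captured server_session.last_job itself;
# time_elapsed() is the timer every main() below reports with. `engine` and
# `dx_obj` are assumed to be set up as in the scripts later in this patch.
import time


def wait_for_last_job(engine, dx_obj):
    start = time.time()
    state = find_job_state_by_jobid(
        engine, dx_obj, dx_obj.server_session.last_job, poll=20
    )
    return state, time_elapsed(start)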
+ :type time_start: float + """ + return round((time.time() - time_start) / 60, +1) diff --git a/list_all_databases.py b/list_all_databases.py deleted file mode 100755 index f3c52e5..0000000 --- a/list_all_databases.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import database - - -server_session= DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") - -all_databases = database.get_all(server_session) - -#print all_databases - -print str(len(all_databases)) + " databases in the LandsharkEngine" - -for each in all_databases: - print each.name \ No newline at end of file diff --git a/logs/.keep b/logs/.keep new file mode 100644 index 0000000..e69de29 diff --git a/simple_snapshot.py b/simple_snapshot.py deleted file mode 100755 index a726aa7..0000000 --- a/simple_snapshot.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import group, database -group_name = "Dev Copies" -database_name = "Employee DB - Dev" - -server_session = DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") - -all_groups = group.get_all(server_session) - -for each in all_groups: - if group_name == each.name: - group_reference = each.reference - break - -database_objs = database.get_all(server_session, group=group_reference) - -for obj in database_objs: - if database_name == obj.name: - database_reference = obj.reference - break - -database.sync(server_session, database_reference) \ No newline at end of file diff --git a/snapshot_group.py b/snapshot_group.py deleted file mode 100755 index 17f1ce3..0000000 --- a/snapshot_group.py +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env python -from delphixpy.v1_6_0.delphix_engine import DelphixEngine -from delphixpy.v1_6_0.web import group, database -from delphixpy.v1_6_0 import job_context -group_name = "Dev Copies" -#database_name = "Employee DB - Dev" - -server_session = DelphixEngine("landsharkengine", "delphix_admin", "landshark", "DOMAIN") - -all_groups = group.get_all(server_session) - -for each in all_groups: - if group_name == each.name: - group_reference = each.reference - break - -database_objs = database.get_all(server_session, group=group_reference) - -with job_context.async(server_session): - for obj in database_objs: - database.sync(server_session, obj.reference) \ No newline at end of file diff --git a/ss_bookmark.py b/ss_bookmark.py new file mode 100755 index 0000000..b3b3b06 --- /dev/null +++ b/ss_bookmark.py @@ -0,0 +1,465 @@ +#!/usr/bin/env python3 +# Create and manage Self-Service Bookmarks +# Copyright (c) 2019 by Delphix. +# All rights reserved. +# See http://docs.delphix.com/display/PS/Copyright+Statement for details +# +# Delphix Support statement available at +# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details +# +# Warranty details provided in external file +# for customers who have purchased support. 
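# --- Illustrative sketch (annotation, not part of the patch) ------------------
# The v1_6_0 one-off scripts removed above (list_all_databases.py,
# simple_snapshot.py, snapshot_group.py) are superseded by the lib/ package
# added in this patch. A rough Python 3 equivalent of snapshot_group.py built
# on the new helpers might look like the following; the engine key is
# hypothetical, the group name is the one the old script used, and
# database.sync() is invoked the same way the removed scripts invoked it.
from delphixpy.v1_10_2.web import database
from lib import get_references
from lib import get_session

dlpx = get_session.GetSession()
dlpx.get_config("./config/dxtools.conf")
cfg = dlpx.dlpx_ddps["myengine"]
dlpx.dlpx_session(
    cfg["ip_address"], cfg["username"], cfg["password"], cfg["use_https"]
)
with dlpx.job_mode(single_thread=False):
    for db in get_references.find_all_databases_by_group(
        dlpx.server_session, "Dev Copies"
    ):
        database.sync(dlpx.server_session, db.reference)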
+# +"""Creates, lists, removes a Self Service Bookmark +Usage: + ss_bookmark.py (--create_bookmark --data_layout \ + [--tags --description --branch_name ] | \ + --list [--tags ] | \ + --delete_bookmark | \ + --activate_bookmark | \ + --update_bookmark | \ + --share_bookmark | \ + --unshare_bookmark ) + [--engine ] + [--single_thread ] + [--poll ] + [--config ] + [--logdir ] + ss_bookmark.py -h | --help | -v | --version + +Creates, Lists, Removes a Self Service Bookmark + +Examples: + ss_bookmark.py --list + ss_bookmark.py --list --tags "Jun 17, 25pct" + ss_bookmark.py --create_bookmark ssbookmark1 --data_layout jstemplate1 + ss_bookmark.py --create_bookmark ssbookmark2 --data_layout sstest1 \ + --tags bug234 --description "aftercommit" + ss_bookmark.py --create_bookmark ssbookmark1 --data_layout jstemplate1 \ + --branch_name jsbranch1 + ss_bookmark.py --activate_bookmark ssbookmark1 + ss_bookmark.py --update_bookmark ssbookmark1 + ss_bookmark.py --delete_bookmark ssbookmark1 + ss_bookmark.py --share_bookmark ssbookmark1 + ss_bookmark.py --unshare_bookmark ssbookmark1 + +Options: + --create_bookmark Name of the new SS Bookmark + --container_name Name of the container to use + --tags Tags to use for this bookmark (comma-delimited) + --description Description of this bookmark + --update_bookmark Name of the bookmark to update + --share_bookmark Name of the bookmark to share. + --unshare_bookmark Name of the bookmark to unshare. + --branch_name Optional: Name of the branch to use + --data_layout Name of the data layout (container or template) + --activate_bookmark Name of the bookmark to activate + --delete_bookmark Delete the SS Bookmark + --list List the bookmarks on a given DDP + --engine Alt Identifier of Delphix DDP in dxtools.conf. + [default: default] + --single_thread Run as a single thread. False if running multiple + threads. + [default: True] + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./config/dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./logs/ss_bookmark.log] + -h --help Show this screen. + -v --version Show version. +""" +import sys +import time +from os.path import basename + +import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import selfservice +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async + +VERSION = "v.0.3.004" + + +def create_bookmark( + dlpx_obj, + bookmark_name, + source_layout, + branch_name=None, + tags=None, + description=None, + type="container", +): + """ + Create the Self Service Bookmark + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param bookmark_name: Name of the bookmark to create + :type bookmark_name: str + :param source_layout: Name of the source (template or container) to use + :type source_layout: str + :param branch_name: Name of the branch to use + :type branch_name: str + :param tags: Tag names to create the bookmark. 
Use commas to break up + different tags + :type tags: str + :param description: Description of the bookmark + :type description: str + """ + bookmark_ref = None + ss_bookmark_params = vo.JSBookmarkCreateParameters() + ss_bookmark_params.bookmark = vo.JSBookmark() + ss_bookmark_params.bookmark.name = bookmark_name + if branch_name: + try: + data_layout_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.template, source_layout + ) + except dlpx_exceptions.DlpxObjectNotFound: + data_layout_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, source_layout + ) + for branch_obj in selfservice.branch.get_all(dlpx_obj.server_session): + if ( + branch_name == branch_obj.name + and data_layout_obj.reference == branch_obj.data_layout + ): + ss_bookmark_params.bookmark.branch = branch_obj.reference + break + if ss_bookmark_params.bookmark.branch is None: + raise dlpx_exceptions.DlpxException( + f"{branch_name} was not found. Set the --data_layout " + f"parameter to the Self Service Template for the bookmark.\n" + ) + elif branch_name is None: + try: + if type == "container": + data_layout_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, source_layout + ) + else: + data_layout_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.template, source_layout + ) + ss_bookmark_params.bookmark.branch = data_layout_obj.active_branch + except (dlpx_exceptions.DlpxException, exceptions.RequestError): + raise dlpx_exceptions.DlpxException( + f"Could not find a default branch in engine {dlpx_obj.server_session.address}" + ) + if tags: + ss_bookmark_params.bookmark.tags = tags.split(",") + if description: + ss_bookmark_params.bookmark.description = description + ss_bookmark_params.timeline_point_parameters = vo.JSTimelinePointLatestTimeInput() + ss_bookmark_params.timeline_point_parameters.source_data_layout = ( + data_layout_obj.reference + ) + try: + bookmark_ref = selfservice.bookmark.create( + dlpx_obj.server_session, ss_bookmark_params + ) + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"\nThe bookmark {bookmark_name} was not " f"created. 
The error was:\n{err}" + ) + raise dlpx_exceptions.DlpxException( + f"The bookmark {bookmark_name} was not created.\n ERROR: {err}" + ) + dx_logging.print_info(f"SS Bookmark {bookmark_name} was created " f"successfully.") + return bookmark_ref + + +def list_bookmarks(dlpx_obj, tags=None): + """ + List all bookmarks on a given engine + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param tags: Only list bookmarks with given tag + :type tags: str + """ + header = "\nName, Reference, Branch Name, Template Name, Tags" + tag_filter = None + try: + ss_bookmarks = selfservice.bookmark.get_all(dlpx_obj.server_session) + print(header) + for ss_bookmark in ss_bookmarks: + branch_name = get_references.find_obj_name( + dlpx_obj.server_session, selfservice.branch, ss_bookmark.branch + ) + if tags: + tag_filter = [x.strip() for x in tags.split(",")] + if tag_filter is None: + tag = ss_bookmark.tags if ss_bookmark.tags else None + if tag: + tag = ", ".join(tag for tag in ss_bookmark.tags) + print( + f"{ss_bookmark.name}, {ss_bookmark.reference}," + f"{branch_name}, {ss_bookmark.template_name}, {tag}" + ) + elif all(tag in ss_bookmark.tags for tag in tag_filter): + print( + f"{ss_bookmark.name}, {ss_bookmark.reference}," + f"{branch_name}, {ss_bookmark.template_name}", + f'{", ".join(tag for tag in ss_bookmark.tags)}', + ) + print("\n") + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + dx_logging.print_exception( + f"\nERROR: The bookmarks on could not be " + f"listed. The error was:\n\n{err}" + ) + + +def unshare_bookmark(dlpx_obj, bookmark_name): + """ + Unshare a bookmark + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param bookmark_name: Name of the bookmark to share + :type bookmark_name: str + """ + try: + selfservice.bookmark.unshare( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.bookmark, bookmark_name + ).reference, + ) + dx_logging.print_info(f"Bookmark {bookmark_name} was unshared successfully.") + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + dx_logging.print_exception( + f"\nERROR: {bookmark_name} could not be unshared. The error was:\n{err}" + ) + + +def share_bookmark(dlpx_obj, bookmark_name): + """ + Share a bookmark + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param bookmark_name: Name of the bookmark to share + :type bookmark_name: str + """ + try: + selfservice.bookmark.share( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.bookmark, bookmark_name + ).reference, + ) + dx_logging.print_info(f"{bookmark_name} was shared successfully.") + except (exceptions.HttpError, exceptions.RequestError) as err: + dx_logging.print_exception( + f"ERROR: {bookmark_name} could not be shared. 
The error was:\n{err}" + ) + + +def update_bookmark(dlpx_obj, bookmark_name): + """ + Updates a bookmark + + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param bookmark_name: Name of the bookmark to update + :type bookmark_name: str + """ + ss_bookmark_obj = vo.JSBookmark() + try: + selfservice.bookmark.update( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.bookmark, bookmark_name + ).reference, + ss_bookmark_obj, + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + dx_logging.print_exception( + f"ERROR: {bookmark_name} could not be updated. The error was:\n{err}" + ) + + +def delete_bookmark(dlpx_obj, bookmark_name): + """ + Deletes a bookmark + + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.get_session.GetSession object + :param bookmark_name: Bookmark to delete + :type bookmark_name: str + """ + try: + selfservice.bookmark.delete( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.bookmark, bookmark_name + ).reference, + ) + dx_logging.print_info(f"{bookmark_name} was deleted successfully.") + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + dx_logging.print_exception( + f"ERROR: The bookmark {bookmark_name} " + f"was not deleted. The error was:\n{err}" + ) + + +@run_async +def main_workflow(engine, dlpx_obj, single_thread): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool + """ + try: + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: ss_bookmark encountered an error " + f'authenticating to {engine["hostname"]} ' + f'{ARGUMENTS["--target"]}:\n{err}\n' + ) + try: + with dlpx_obj.job_mode(single_thread): + if ARGUMENTS["--create_bookmark"]: + create_bookmark( + dlpx_obj, + ARGUMENTS["--create_bookmark"], + ARGUMENTS["--data_layout"], + ARGUMENTS["--branch_name"] if ARGUMENTS["--branch_name"] else None, + ARGUMENTS["--tags"] if ARGUMENTS["--tags"] else None, + ARGUMENTS["--description"] if ARGUMENTS["--description"] else None, + ) + elif ARGUMENTS["--delete_bookmark"]: + delete_bookmark(dlpx_obj, ARGUMENTS["--delete_bookmark"]) + elif ARGUMENTS["--update_bookmark"]: + update_bookmark(dlpx_obj, ARGUMENTS["--update_bookmark"]) + elif ARGUMENTS["--share_bookmark"]: + share_bookmark(dlpx_obj, ARGUMENTS["--share_bookmark"]) + elif ARGUMENTS["--unshare_bookmark"]: + unshare_bookmark(dlpx_obj, ARGUMENTS["--unshare_bookmark"]) + elif ARGUMENTS["--list"]: + list_bookmarks( + dlpx_obj, + ARGUMENTS["--tags"] if ARGUMENTS["--tags"] else None, + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Error in ss_bookmark:" f'{engine["ip_address"]}\n ERROR: {err}' + ) + raise err + 
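# --- Illustrative sketch (annotation, not part of the patch) ------------------
# update_bookmark() above passes an empty vo.JSBookmark(), so no fields are
# supplied to change. A sketch of how one might populate the object before
# calling selfservice.bookmark.update(), reusing the attributes that
# create_bookmark() already sets and the imports at the top of this script
# (the new tag values are hypothetical):
def retag_bookmark(dlpx_obj, bookmark_name, new_tags):
    bookmark_ref = get_references.find_obj_by_name(
        dlpx_obj.server_session, selfservice.bookmark, bookmark_name
    ).reference
    ss_bookmark_obj = vo.JSBookmark()
    ss_bookmark_obj.tags = new_tags.split(",")
    selfservice.bookmark.update(
        dlpx_obj.server_session, bookmark_ref, ss_bookmark_obj
    )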
run_job.track_running_jobs(engine, dlpx_obj) + + +def main(): + """ + main function - creates session and runs jobs + """ + time_start = time.time() + try: + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] + dx_session_obj.get_config(config_file_path) + for each in run_job.run_job_mt( + main_workflow, dx_session_obj, engine, single_thread + ): + each.join() + # run_job.run_job_mt( main_workflow, dx_session_obj, engine, single_thread) + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"script took {elapsed_minutes} minutes to " f"get this far." + ) + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call. + dx_logging.print_exception(f"Errow while executing the bookmark operation") + sys.exit(2) + + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) + sys.exit(2) + + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." + f"{basename(__file__)} took {elapsed_minutes} minutes to complete" + ) + sys.exit(3) + + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} minutes to complete." + ) + + +if __name__ == "__main__": + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! + main() diff --git a/ss_branch.py b/ss_branch.py new file mode 100755 index 0000000..5fafa80 --- /dev/null +++ b/ss_branch.py @@ -0,0 +1,410 @@ +#!/usr/bin/env python3 +# Program Name : ss_branch.py +# Description : Delphix implementation script +# Author : Corey Brune +# Created: March 4 2016 +# +# Copyright (c) 2016 by Delphix. +# All rights reserved. +# See http://docs.delphix.com/display/PS/Copyright+Statement for details +# +# Delphix Support statement available at +# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details +# +# Warranty details provided in external file +# for customers who have purchased support. 
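# --- Annotation (not part of the patch) ---------------------------------------
# In create_branch() below, the --timestamp value is assigned directly to
# vo.JSTimelinePointTimeInput().time on the container's active branch, so it
# should take the UTC ISO-8601 form shown in this script's usage examples,
# e.g. "2021-02-07T04:34:48.952Z".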
+# +"""Creates, updates, deletes, activates and lists branches +Usage: + ss_branch.py (--create_branch --container_name + [--template_name | --bookmark_name ] [--timestamp ] | \ + --list | --delete_branch | --activate_branch + ) + [--single_thread ] + [--engine ] + [--config ] + [--logdir ] + ss_branch.py -h | --help | -v | --version + +Creates, Lists, Removes a Jet Stream Branch + +Examples: + ss_branch.py --list + ss_branch.py --create_branch jsbranch1 --container_name jscontainer \ + --template_name jstemplate1 + ss_branch.py --activate_branch jsbranch1 + ss_branch.py --delete_branch jsbranch1 + ss_branch.py --create_branch tb4 --container_name dc1 \ + --timestamp "2021-02-07T04:34:48.952Z" + +Options: + --create_branch Name of the new JS Branch + --bookmark_name Name of the container to use + --template_name Name of the template to use + --timestamp Timestamp on active branch to create branch from + --activate_branch Name of the branch to activate + --delete_branch Delete the JS Branch + --list List the branchs on a given engine + --engine Alt Identifier of Delphix engine in dxtools.conf. + [default: default] + --single_thread Asynchronous/Synchronous mode + --parallel Limit number of jobs to maxjob + --poll The number of seconds to wait between job polls + [default: 10] + --config The path to the dxtools.conf file + [default: ./config/dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./logs/ss_branch.log] + -h --help Show this screen. + -v --version Show version. +""" +import datetime +import re +import sys +import time +from os.path import basename + +import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import selfservice +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async + +VERSION = "v.0.3.001" + + +def create_branch( + dlpx_obj, + branch_name, + container_name, + template_name=None, + bookmark_name=None, + timestamp=None, +): + """ + Create a Self-Service Branch + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession object + :param branch_name: Name of the branch to create + :type branch_name: str + :param container_name: Name of the container to use + :type container_name: str + :param template_name: Name of the template to use + :type template_name: str + :param bookmark_name: Name of the bookmark to use + :type bookmark_name: str + """ + ss_branch = vo.JSBranchCreateParameters() + ss_branch.name = branch_name + data_container_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ) + ss_branch.data_container = data_container_obj.reference + if timestamp: + ss_branch.timeline_point_parameters = vo.JSTimelinePointTimeInput() + ss_branch.timeline_point_parameters.time = timestamp + ss_branch.timeline_point_parameters.branch = data_container_obj.active_branch + elif bookmark_name: + ss_branch.timeline_point_parameters = vo.JSTimelinePointBookmarkInput() + ss_branch.timeline_point_parameters.bookmark = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.bookmark, bookmark_name + ).reference + elif template_name or container_name: + if template_name: + source_layout_ref = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.template, template_name + ).reference + else: + source_layout_ref = get_references.find_obj_by_name( 
+ dlpx_obj.server_session, selfservice.container, container_name + ).reference + ss_branch.timeline_point_parameters = vo.JSTimelinePointLatestTimeInput() + ss_branch.timeline_point_parameters.source_data_layout = source_layout_ref + try: + selfservice.branch.create(dlpx_obj.server_session, ss_branch) + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) + except (exceptions.RequestError, exceptions.HttpError) as err: + raise dlpx_exceptions.DlpxException( + f"The branch was not created. The error was:\n{err}" + ) + dx_logging.print_info(f"Self Service Branch {branch_name} is being created\n") + + +def list_branches(dlpx_obj): + """ + List all branches on a given engine + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession object + """ + try: + # header = "\nBranch Name Data Layout Reference End Time" + js_data_layout = "" + ss_branches = selfservice.branch.get_all(dlpx_obj.server_session) + if ss_branches: + dx_logging.print_info("=" * 130) + header = "{:<25} {:<25} {:<25} {:<25} {:<25}".format( + "Branch Name", + "Data Layout", + "Layout Type", + " Branch Reference", + "End Time", + ) + dx_logging.print_info(header) + dx_logging.print_info("-" * 130) + for ss_branch in ss_branches: + js_end_time = selfservice.operation.get( + dlpx_obj.server_session, ss_branch.first_operation + ).end_time + js_obj_type = "CONTAINER" + if re.search("TEMPLATE", ss_branch.data_layout): + js_data_layout = get_references.find_obj_name( + dlpx_obj.server_session, + selfservice.template, + ss_branch.data_layout, + ) + js_obj_type = "TEMPLATE" + elif re.search("CONTAINER", ss_branch.data_layout): + js_data_layout = get_references.find_obj_name( + dlpx_obj.server_session, + selfservice.container, + ss_branch.data_layout, + ) + dx_logging.print_info( + "{:<25} {:<25} {:<25} {:<25} {:<25}".format( + ss_branch._name[0], + js_data_layout, + js_obj_type, + ss_branch.reference, + js_end_time, + ) + ) + dx_logging.print_info("=" * 130) + else: + dx_logging.print_info(f"No branches found on engine.") + + except dlpx_exceptions.DlpxException as err: + raise ( + f"ERROR: Self Service Branches could not be listed. The error " + f"was:\n{err}" + ) + + +def update_branch(dlpx_obj, branch_name): + """ + Updates a branch + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession object + :param branch_name: Name of the branch to update + :type branch_name: str + """ + + ss_branch_obj = vo.JSBranch() + try: + branch_obj = get_references.find_obj_name( + dlpx_obj.server_session, selfservice.branch, branch_name + ) + selfservice.branch.update( + dlpx_obj.server_session, branch_obj.reference, ss_branch_obj + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: The branch could not be " f"updated. 
The error was:{err}" + ) + dx_logging.print_info(f"The branch {branch_name} was updated " f"successfully.\n") + + +def activate_branch(dlpx_obj, branch_name): + """ + Activates a branch + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession object + :param branch_name: Name of the branch to activate + :type branch_name: str + """ + try: + branch_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.branch, branch_name + ) + selfservice.branch.activate(dlpx_obj.server_session, branch_obj.reference) + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) + except exceptions.RequestError as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: An error occurred activating the {branch_name}:\n{err}" + ) + dx_logging.print_info(f"The branch {branch_name} was activated " f"successfully.\n") + + +def delete_branch(dlpx_obj, branch_name): + """ + Deletes a branch + :param dlpx_obj: Virtualization Engine session object + :type dlpx_obj: lib.GetSession.GetSession object + :param branch_name: Branch to delete + :type branch_name: str + """ + + try: + branch_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.branch, branch_name + ) + selfservice.branch.delete(dlpx_obj.server_session, branch_obj.reference) + dlpx_obj.jobs[dlpx_obj.server_session.address].append( + dlpx_obj.server_session.last_job + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + dx_logging.print_exception(f"The branch could not be deleted: \n ERROR:{err}") + raise dlpx_exceptions.DlpxException( + f"ERROR: The branch was not deleted. The error was:\n{err}" + ) + except Exception as err: + dx_logging.print_exception(f"The branch could not be deleted: \n ERROR:{err}") + raise err + + +@run_async +def main_workflow(engine, dlpx_obj, single_thread): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. 
+ The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + :param engine: Dictionary of engines + :type engine: dictionary + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool + """ + try: + # Setup the connection to the Delphix DDP + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxException as err: + dx_logging.print_exception( + f"ERROR: dx_refresh_vdb encountered an error authenticating to " + f'{engine["hostname"]} {ARGUMENTS["--target"]}:\n{err}\n' + ) + try: + with dlpx_obj.job_mode(single_thread): + if ARGUMENTS["--create_branch"]: + create_branch( + dlpx_obj, + ARGUMENTS["--create_branch"], + ARGUMENTS["--container_name"], + ARGUMENTS["--template_name"], + ARGUMENTS["--bookmark_name"], + ARGUMENTS["--timestamp"], + ) + elif ARGUMENTS["--delete_branch"]: + delete_branch(dlpx_obj, ARGUMENTS["--delete_branch"]) + elif ARGUMENTS["--activate_branch"]: + activate_branch(dlpx_obj, ARGUMENTS["--activate_branch"]) + elif ARGUMENTS["--list"]: + list_branches(dlpx_obj) + run_job.track_running_jobs(engine, dlpx_obj) + except ( + dlpx_exceptions.DlpxException, + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Error in ss_branch:" f'{engine["ip_address"]}\n{err}' + ) + raise err + + +def main(): + """ + main function - creates session and runs jobs + """ + time_start = time.time() + try: + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] + dx_session_obj.get_config(config_file_path) + # This is the function that will handle processing main_workflow for + # all the servers. + for each in run_job.run_job_mt( + main_workflow, dx_session_obj, engine, single_thread + ): + # join them back together so that we wait for all threads to + # complete + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"script took {elapsed_minutes} minutes to " f"get this far." + ) + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function + # call. + dx_logging.print_exception( + f"ERROR: Please check the ERROR message " f"below:\n {err.error}" + ) + sys.exit(2) + + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + dx_logging.print_exception( + f"ERROR: Connection failed to the Delphix DDP. Please check " + f"the ERROR message below:\n{err.status}" + ) + sys.exit(2) + + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_exception( + f"A job failed in the Delphix Engine:\n{err.job}." 
+ f"{basename(__file__)} took {elapsed_minutes} minutes to get " + f"this far" + ) + sys.exit(3) + + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} " f"minutes to get this far." + ) + + +if __name__ == "__main__": + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt.docopt(__doc__, version=basename(__file__) + " " + VERSION) + # Feed our ARGUMENTS to the main function, and off we go! + main() diff --git a/ss_container.py b/ss_container.py new file mode 100755 index 0000000..ac8f6ce --- /dev/null +++ b/ss_container.py @@ -0,0 +1,579 @@ +#!/usr/bin/env python3 +# Program Name : ss_container.py +# Description : Delphix implementation script +# Author : Corey Brune +# +# Copyright (c) 2019 by Delphix. +# All rights reserved. +# See http://docs.delphix.com/display/PS/Copyright+Statement for details +# +# Warranty details provided in external file +# for customers who have purchased support. +# +"""Create, delete, refresh and list JS containers. +Usage: + ss_container.py (--list | --create_container \ + --template_name --database ) | \ + --delete_container [--keep_vdbs] | \ + --restore_container --bookmark_name | \ + --remove_owner --container_name | \ + --add_owner --container_name | \ + --refresh_container | --reset_container | \ + --list_hierarchy + [--engine --poll --parallel ] + [--single_thread --config ] + [--logdir ] + + ss_container.py -h | --help | -v | --version + +Creates, Lists, Removes a Self-Service Data Pod + +Examples: + ss_container.py --list + ss_container.py --list_hierarchy suiteCRM-Dev-DataPod + ss_container.py --add_owner dev --container_name suiteCRM-Dev-DataPod + ss_container.py --create_container sscontainer1 --database : \ + --template_name jstemplate1 + ss_container.py --delete_container sscontainer1 + ss_container.py --refresh_container sscontainer1 + ss_container.py --add_owner jsuser --container_name sscontainer1 + ss_container.py --remove_owner jsuser --container_name sscontainer1 + ss_container.py --refresh_container sscontainer1 + ss_container.py --restore_container sscontainer1 --bookmark_name ssbookmark1 + js_conatiner.py --reset_container sscontainer1 + +Options: + --create_container Name of the new SS Container + [default:None] + --container_name Name of the SS Container + --refresh_container Name of the new SS Container + --restore_container Name of the SS Container to restore + --reset_container Reset last data operation + --template_name Name of the JS Template to use for the container + --add_owner Name of the JS Owner for the container + --remove_owner Name of the JS Owner to remove + --bookmark_name Name of the JS Bookmark to restore the container + --keep_vdbs If set, deleting the container will not remove + the underlying VDB(s) + --list_hierarchy Lists hierarchy of a given container name + --delete_container Delete the SS Container + --database Name of the child database(s) to use for the + SS Container + --list List the containers on a given engine + --engine dentifier of Delphix DDP in dxtools.conf. + [default: default] + --single_thread Run as a single thread. Use True if there are + multiple engines and the operation needs to run + in parallel. 
+ [default: True] + --config The path to the dxtools.conf file + [default: ./config/dxtools.conf] + --logdir The path to the logfile you want to use. + [default: ./logs/ss_container.log] + -h --help Show this screen. + -v --version Show version. +""" +import sys +import time +from os.path import basename + +from docopt import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import selfservice +from delphixpy.v1_10_2.web import user +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async + +VERSION = "v.0.3.001" + + +def create_container(dlpx_obj, template_name, container_name, database_name): + """ + Create the SS container + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param template_name: name of the self-service template + :type template_name: str + :param container_name: Name of the container to create + :type container_name: str + :param database_name: Name of the database(s) to use in the container + :type database_name: str + :return created container reference + :rtype str + """ + ss_container_params = vo.JSDataContainerCreateWithoutRefreshParameters() + container_ds_lst = [] + for data_set in database_name.split(":"): + container_ds_lst.append( + get_references.build_data_source_params(dlpx_obj, database, data_set) + ) + try: + ss_template_ref = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.template, template_name + ).reference + ss_container_params.template = ss_template_ref + ss_container_params.timeline_point_parameters = ( + vo.JSTimelinePointLatestTimeInput() + ) + ss_container_params.timeline_point_parameters.sourceDataLayout = ss_template_ref + ss_container_params.data_sources = container_ds_lst + ss_container_params.name = container_name + container_ref = selfservice.container.create( + dlpx_obj.server_session, ss_container_params + ) + dlpx_obj.jobs[ + dlpx_obj.server_session.address + ] = dlpx_obj.server_session.last_job + return container_ref + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Container {container_name} was not created. The error was: {err}" + ) + + +def remove_owner(dlpx_obj, owner_name, container_name): + """ + Removes an owner from a container + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param owner_name: Name of the owner to remove + :type owner_name: str + :param container_name: Name of the container + :type container_name: str + """ + owner_params = vo.JSDataContainerModifyOwnerParameters() + try: + owner_params.owner = get_references.find_obj_by_name( + dlpx_obj.server_session, user, owner_name + ).reference + container_obj = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ) + selfservice.container.remove_owner( + dlpx_obj.server_session, container_obj.reference, owner_params + ) + except ( + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"The user was not added to container " + f"{container_name}. 
The error was:\n{err}" + ) + + +def restore_container(dlpx_obj, container_name, bookmark_name): + """ + Restores a container to a given JS bookmark + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param container_name: Name of the container + :type container_name: str + :param bookmark_name: Name of the bookmark to restore + :type bookmark_name: str + """ + bookmark_params = vo.JSDataContainerRestoreParameters() + bookmark_params.timeline_point_parameters = vo.JSTimelinePointBookmarkInput() + bookmark_params.timeline_point_parameters.bookmark = ( + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.bookmark, bookmark_name + ).reference + ) + bookmark_params.force_option = False + try: + selfservice.container.restore( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ).reference, + bookmark_params, + ) + dlpx_obj.jobs[ + dlpx_obj.server_session.address + ] = dlpx_obj.server_session.last_job + except ( + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception(f"The container was not restored:\n{err}") + + +def add_owner(dlpx_obj, owner_name, container_name): + """ + Adds an owner to a container + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param owner_name: Grant authorizations for the given user on this + container and parent template + :type owner_name: str + :param container_name: Name of the container + :type container_name: str + """ + owner_params = vo.JSDataContainerModifyOwnerParameters() + try: + owner_params.owner = get_references.find_obj_by_name( + dlpx_obj.server_session, user, owner_name + ).reference + selfservice.container.add_owner( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ).reference, + owner_params, + ) + except ( + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"The user was not removed from the container " f"{container_name}:\n{err}" + ) + + +def refresh_container(dlpx_obj, container_name): + """ + Refreshes a container + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param container_name: Name of the container to refresh + :type container_name: str + """ + try: + selfservice.container.refresh( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ).reference, + ) + dlpx_obj.jobs[ + dlpx_obj.server_session.address + ] = dlpx_obj.server_session.last_job + except ( + dlpx_exceptions.DlpxObjectNotFound, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Container {container_name} was not refreshed. 
" f"The error was:\n{err}" + ) + + +def delete_container(dlpx_obj, container_name, keep_vdbs=False): + """ + Deletes a container + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param container_name: Name of the container to delete + :type container_name: str + :param keep_vdbs: When deleting the container, delete the VDBs as well + if set to True + :type keep_vdbs: bool + """ + try: + if keep_vdbs: + ss_container_params = vo.JSDataContainerDeleteParameters() + ss_container_params.delete_data_sources = False + selfservice.container.delete( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ).reference, + ss_container_params, + ) + elif keep_vdbs is False: + selfservice.container.delete( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ).reference, + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"Container {container_name} was not deleted. " f"The error was:\n{err}" + ) + + +def list_containers(dlpx_obj): + """ + List all containers on a given engine + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + """ + header = "Name, Active Branch, Owner, Reference, Template, Last Updated" + ss_containers = selfservice.container.get_all(dlpx_obj.server_session) + try: + print(header) + for ss_container in ss_containers: + last_updated = get_references.convert_timestamp( + dlpx_obj.server_session, ss_container.last_updated[:-5] + ) + dx_logging.print_info( + f"{ss_container.name}, {ss_container.active_branch}, " + f"{ss_container.owner}, {ss_container.reference}," + f"{ss_container.template}, {last_updated}" + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + dx_logging.print_exception( + f"ERROR: SS Containers could not be listed. The error was:\n{err}" + ) + + +def reset_container(dlpx_obj, container_name): + """ + Undo the last refresh or restore operation + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param container_name: Name of the container to reset + :type container_name: str + """ + try: + selfservice.container.reset( + dlpx_obj.server_session, + get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ).reference, + ) + except exceptions.RequestError as err: + dx_logging.print_exception( + f"ERROR: SS Container was not reset. The error was:\n{err}" + ) + + +def list_hierarchy(dlpx_obj, container_name): + """ + Filter container listing. 
+ :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param container_name: Name of the container to list child VDBs + :type container_name: str + """ + database_dct = {} + layout_ref = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.container, container_name + ).reference + for data_source in selfservice.datasource.get_all( + dlpx_obj.server_session, data_layout=layout_ref + ): + db_name = get_references.find_obj_name( + dlpx_obj.server_session, database, data_source.container + ) + + if hasattr(data_source.runtime, "instance_jdbc_string"): + database_dct[db_name] = data_source.runtime.instance_jdbc_string + else: + database_dct[db_name] = None + try: + dx_logging.print_info( + f"Container: {container_name}\n" + f"Related VDBs: " + f"{convert_dct_str(database_dct)}\n" + ) + except AttributeError as err: + dx_logging.print_exception(err) + raise dlpx_exceptions.DlpxException(err) + + +def convert_dct_str(obj_dct): + """ + Convert dictionary into a string for printing + :param obj_dct: Dictionary to convert into a string + :type obj_dct: dict + :return: string object + """ + js_str = "" + if isinstance(obj_dct, dict): + for js_db, js_jdbc in obj_dct.items(): + if isinstance(js_jdbc, list): + js_str += f'{js_db}: {", ".join(js_jdbc)}\n' + elif isinstance(js_jdbc, str): + js_str += f"{js_db}: {js_jdbc}\n" + else: + raise dlpx_exceptions.DlpxException( + f"Passed a non-dictionary object to convert_dct_str():" f"{type(obj_dct)}" + ) + return js_str + + +@run_async +def main_workflow(engine, dlpx_obj, single_thread): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + :param engine: Dictionary of engines in the config file + :type engine: dict + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool + """ + try: + # Setup the connection to the Delphix Engine + dlpx_obj.dlpx_session( + engine["ip_address"], + engine["username"], + engine["password"], + engine["use_https"], + ) + except dlpx_exceptions.DlpxObjectNotFound as err: + dx_logging.print_exception( + f'ERROR: Delphix Engine {engine["ip_address"]} encountered ' + f"an error while creating the session:\n{err}\n" + ) + try: + with dlpx_obj.job_mode(single_thread): + if ARGUMENTS["--create_container"]: + create_container( + dlpx_obj, + ARGUMENTS["--template_name"], + ARGUMENTS["--create_container"], + ARGUMENTS["--database"], + ) + dx_logging.print_info( + f'Self Service Container {ARGUMENTS["--create_container"]}' + f"was created successfully." 
+ ) + elif ARGUMENTS["--delete_container"]: + delete_container( + dlpx_obj, + ARGUMENTS["--delete_container"], + ARGUMENTS["--keep_vdbs"], + ) + elif ARGUMENTS["--list"]: + list_containers(dlpx_obj) + elif ARGUMENTS["--remove_owner"]: + remove_owner( + dlpx_obj, + ARGUMENTS["--remove_owner"], + ARGUMENTS["--container_name"], + ) + dx_logging.print_info( + f'User {ARGUMENTS["--remove_owner"]} had ' + f"access revoked from " + f'{ARGUMENTS["--container_name"]}' + ) + elif ARGUMENTS["--restore_container"]: + restore_container( + dlpx_obj, + ARGUMENTS["--restore_container"], + ARGUMENTS["--bookmark_name"], + ) + dx_logging.print_info( + f'Container {ARGUMENTS["--restore_container"]} ' + f"was restored successfully with bookmark " + f'{ARGUMENTS["--bookmark_name"]}' + ) + elif ARGUMENTS["--add_owner"]: + add_owner( + dlpx_obj, + ARGUMENTS["--add_owner"], + ARGUMENTS["--container_name"], + ) + dx_logging.print_info( + f'User {ARGUMENTS["--add_owner"]} was granted ' + f'access to {ARGUMENTS["--container_name"]}' + ) + elif ARGUMENTS["--refresh_container"]: + refresh_container(engine, dlpx_obj, ARGUMENTS["--refresh_container"]) + dx_logging.print_info( + f'The container {ARGUMENTS["--refresh_container"]}' + f" was refreshed." + ) + elif ARGUMENTS["--list_hierarchy"]: + list_hierarchy(dlpx_obj, ARGUMENTS["--list_hierarchy"]) + elif ARGUMENTS["--reset_container"]: + reset_container(dlpx_obj, ARGUMENTS["--reset_container"]) + print(f'Container {ARGUMENTS["--reset_container"]} was reset.') + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f'Error in ss_container: {engine["hostname"]}\n{err}' + ) + run_job.find_job_state(engine, dlpx_obj) + + +def main(): + """ + Main function - setup global variables and timer + """ + time_start = time.time() + try: + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] + dx_session_obj.get_config(config_file_path) + for each in run_job.run_job_mt( + main_workflow, dx_session_obj, engine, single_thread + ): + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info(f"script took {elapsed_minutes} minutes to get this far.") + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function. + dx_logging.print_exception( + f"ERROR: Please check the ERROR message below:\n{err}" + ) + sys.exit(2) + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + print( + f"ERROR: Connection failed to the Delphix Engine. Please " + f"check the error message below:\n{err}" + ) + sys.exit(2) + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print(f"A job failed in the Delphix Engine:\n{err.job}") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} minutes to get this far." 
+        )
+        sys.exit(3)
+    except KeyboardInterrupt:
+        # We use this exception handler to gracefully handle ctrl+c exits
+        dx_logging.print_debug("You sent a CTRL+C to interrupt the process")
+        elapsed_minutes = run_job.time_elapsed(time_start)
+        dx_logging.print_info(
+            f"{basename(__file__)} took {elapsed_minutes} minutes to get this far."
+        )
+
+
+if __name__ == "__main__":
+    # Grab our ARGUMENTS from the doc at the top of the script
+    ARGUMENTS = docopt(__doc__, version=basename(__file__) + " " + VERSION)
+    # Feed our ARGUMENTS to the main function, and off we go!
+    main()
diff --git a/ss_template.py b/ss_template.py
new file mode 100755
index 0000000..8858e8c
--- /dev/null
+++ b/ss_template.py
@@ -0,0 +1,290 @@
+#!/usr/bin/env python3
+# Program Name : ss_template.py
+# Description : Delphix implementation script
+# Author : Corey Brune
+#
+# Copyright (c) 2019 by Delphix.
+# All rights reserved.
+# See http://docs.delphix.com/display/PS/Copyright+Statement for details
+#
+# Delphix Support statement available at
+# See http://docs.delphix.com/display/PS/PS+Script+Support+Policy for details
+#
+# Warranty details provided in external file
+# for customers who have purchased support.
+#
+"""Creates, deletes and lists SS templates.
+Usage:
+    ss_template.py (--create_template <name> --database <name> | --list_templates
+                   | --delete_template <name>)
+                   [--engine <identifier> | --all] [--parallel <n>]
+                   [--poll <n>] [--debug] [--single_thread <bool>]
+                   [--config <path_to_file>] [--logdir <path_to_file>]
+    ss_template.py -h | --help | -v | --version
+
+Creates, lists, and removes a Self-Service Template
+
+Examples:
+    ss_template.py --list_templates
+    ss_template.py --create_template jstemplate1 --database <db_name>
+    ss_template.py --create_template jstemplate2 --database <db_name>
+    ss_template.py --delete_template jstemplate1
+
+Options:
+    --create_template <name>   Name of the new SS Template
+    --delete_template <name>   Delete the SS Template
+    --database <name>          Name of the database(s) to use for the
+                               SS Template
+                               Note: If adding multiple template DBs, use a
+                               colon (:) to delineate between the DB names.
+    --list_templates           List the templates on a given engine
+    --single_thread <bool>     Run as a single thread. Set to False if there
+                               are multiple engines and the operations need
+                               to run in parallel.
+                               [default: True]
+    --engine <identifier>      Alternate identifier of the Delphix engine
+                               in dxtools.conf.
+                               [default: default]
+    --all                      Run against all engines.
+    --debug                    Enable debug logging
+    --parallel <n>             Limit number of jobs to <n>
+    --poll <n>                 The number of seconds to wait between job polls
+                               [default: 10]
+    --config <path_to_file>    The path to the dxtools.conf file
+                               [default: ./config/dxtools.conf]
+    --logdir <path_to_file>    The path to the logfile you want to use.
+                               [default: ./logs/ss_template.log]
+    -h --help                  Show this screen.
+    -v --version               Show version.
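+
+    The action flags above can be combined with the engine, config, and logdir
+    options; for example (the engine name below is a placeholder for an entry
+    in dxtools.conf):
+        ss_template.py --list_templates --engine myengine1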
+""" + +import sys +import time +from os.path import basename + +from docopt import docopt + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import selfservice +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import dx_logging +from lib import get_references +from lib import get_session +from lib import run_job +from lib.run_async import run_async + +VERSION = "v.0.3.001" + + +def create_template(engine, dlpx_obj, template_name, database_name): + """ + Create the SS Template + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param template_name: Name of the template to create + :type template_name: str + :param database_name: Name of the database(s) to use in the template + :type database_name: str + """ + ss_template_params = vo.JSDataTemplateCreateParameters() + ss_template_params.name = template_name + template_ds_lst = [] + template_ref = "" + # engine_name = dlpx_obj.dlpx_ddps["engine_name"] + for data_set in database_name.split(":"): + template_ds_lst.append( + get_references.build_data_source_params(dlpx_obj, database, data_set) + ) + ss_template_params.data_sources = template_ds_lst + try: + template_ref = selfservice.template.create( + dlpx_obj.server_session, ss_template_params + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.HttpError, + ) as err: + dx_logging.print_exception( + f"The template {template_name} was not created:\n{err}" + ) + raise dlpx_exceptions.DlpxException(err) + dlpx_obj.jobs[dlpx_obj.server_session.address] = dlpx_obj.server_session.last_job + return template_ref + + +def list_templates(dlpx_obj): + """ + List all templates on a given engine + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + """ + header = "Name, Reference, Active Branch, Last Updated" + try: + ss_templates = selfservice.template.get_all(dlpx_obj.server_session) + if not ss_templates: + dx_logging.print_info(f"No Self Service templates on engine") + else: + dx_logging.print_info(header) + for ss_template in ss_templates: + last_updated = get_references.convert_timestamp( + dlpx_obj.server_session, ss_template.last_updated[:-5] + ) + dx_logging.print_info( + f"{ss_template.name}, {ss_template.reference}," + f"{ss_template.active_branch},{last_updated}" + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + raise dlpx_exceptions.DlpxException( + f"ERROR: The templates could not be listed:\n{err}" + ) + + +def delete_template(dlpx_obj, template_name): + """ + Deletes a template + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param template_name: Name of the template to delete + :type template_name: str + """ + try: + template_ref = get_references.find_obj_by_name( + dlpx_obj.server_session, selfservice.template, template_name + ).reference + selfservice.template.delete(dlpx_obj.server_session, template_ref) + except (dlpx_exceptions.DlpxObjectNotFound) as err: + dx_logging.print_exception(f"The template {template_name} not found") + raise dlpx_exceptions.DlpxObjectNotFound( + f"The template {template_name} not found" + ) + except ( + dlpx_exceptions.DlpxException, + exceptions.HttpError, + exceptions.RequestError, + ) as err: + dx_logging.print_exception( + f"\nERROR: The template {template_name} " f"was not deleted:\n{err}" + ) + raise dlpx_exceptions.DlpxException(err) + + 
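+# Illustrative sketch only (not called by this script): the helpers above can
+# also be driven directly from Python. The config path, engine entry name,
+# and the template/database names below are placeholders; a colon-separated
+# list adds multiple data sources to a single template.
+#
+#   from lib import get_session
+#
+#   session = get_session.GetSession()
+#   session.get_config("./config/dxtools.conf")
+#   engine = session.dlpx_ddps["myengine1"]   # entry from dxtools.conf
+#   session.dlpx_session(
+#       engine["ip_address"], engine["username"], engine["password"]
+#   )
+#   create_template(engine, session, "mytemplate", "mydb1:mydb2")
+#   list_templates(session)
+#   delete_template(session, "mytemplate")
+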
+@run_async +def main_workflow(engine, dlpx_obj, single_thread): + """ + This function is where we create our main workflow. + Use the @run_async decorator to run this function asynchronously. + The @run_async decorator allows us to run against multiple Delphix Engine + simultaneously + :param engine: Dictionary of engines in the config file + :type engine: dict + :param dlpx_obj: DDP session object + :type dlpx_obj: lib.GetSession.GetSession object + :param single_thread: True - run single threaded, False - run multi-thread + :type single_thread: bool + """ + try: + # Setup the connection to the Delphix Engine + dlpx_obj.dlpx_session( + engine["ip_address"], engine["username"], engine["password"] + ) + except dlpx_exceptions.DlpxObjectNotFound as err: + dx_logging.print_exception( + f'ERROR: Delphix Engine {engine["ip_address"]} encountered ' + f"an error while creating the session:\n{err}\n" + ) + try: + with dlpx_obj.job_mode(single_thread): + if ARGUMENTS["--create_template"]: + template_ref = create_template( + engine, + dlpx_obj, + ARGUMENTS["--create_template"], + ARGUMENTS["--database"], + ) + dx_logging.print_info( + f'Template {ARGUMENTS["--create_template"]} ' + f"was created successfully. Reference: " + f"{template_ref}\n" + ) + elif ARGUMENTS["--delete_template"]: + delete_template(dlpx_obj, ARGUMENTS["--delete_template"]) + print(f'Template {ARGUMENTS["--delete_template"]} ' f"is deleted.") + elif ARGUMENTS["--list_templates"]: + list_templates(dlpx_obj) + except ( + dlpx_exceptions.DlpxException, + exceptions.RequestError, + exceptions.JobError, + exceptions.HttpError, + dlpx_exceptions.DlpxObjectNotFound, + ) as err: + dx_logging.print_exception( + f"Error in ss_template: " f'{engine["ip_address"]}:\n{err}' + ) + + +def main(): + """ + Main function - setup global variables and timer + """ + time_start = time.time() + try: + dx_session_obj = get_session.GetSession() + dx_logging.logging_est(ARGUMENTS["--logdir"]) + config_file_path = ARGUMENTS["--config"] + single_thread = ARGUMENTS["--single_thread"] + engine = ARGUMENTS["--engine"] + dx_session_obj.get_config(config_file_path) + dx_session_obj.get_config(config_file_path) + for each in run_job.run_job_mt( + main_workflow, dx_session_obj, engine, single_thread + ): + each.join() + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info(f"ss_template took {elapsed_minutes} minutes to complete") + # Here we handle what we do when the unexpected happens + except SystemExit as err: + # This is what we use to handle our sys.exit(#) + sys.exit(err) + except dlpx_exceptions.DlpxException as err: + # We use this exception handler when an error occurs in a function. + dx_logging.print_exception( + f"\nERROR: Please check the ERROR message " f"below:\n{err}" + ) + sys.exit(2) + except exceptions.HttpError as err: + # We use this exception handler when our connection to Delphix fails + print( + f"\nERROR: Connection failed to the Delphix Engine. 
Please " + f"check the error message below:\n{err}" + ) + sys.exit(2) + except exceptions.JobError as err: + # We use this exception handler when a job fails in Delphix so that we + # have actionable data + print(f"A job failed in the Delphix Engine:\n{err.job}") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes}" f" minutes to get this far" + ) + sys.exit(3) + except KeyboardInterrupt: + # We use this exception handler to gracefully handle ctrl+c exits + dx_logging.print_debug("You sent a CTRL+C to interrupt the process") + elapsed_minutes = run_job.time_elapsed(time_start) + dx_logging.print_info( + f"{basename(__file__)} took {elapsed_minutes} minutes to complete" + ) + + +if __name__ == "__main__": + # Grab our ARGUMENTS from the doc at the top of the script + ARGUMENTS = docopt(__doc__, version=basename(__file__) + " " + VERSION) + + # Feed our ARGUMENTS to the main function, and off we go! + main() diff --git a/tests/test_dx_authorization.py b/tests/test_dx_authorization.py deleted file mode 100755 index a29b4d8..0000000 --- a/tests/test_dx_authorization.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python - -""" -Unit tests for DVE authorizations -""" - -import unittest -import sys - -import dx_authorization -from lib.GetSession import GetSession - - -class DxAuthorizationTests(unittest.TestCase): - """ - Creates, activates, lists destroys Delphix Authorizations - - Requirements: VDB named dx_vdb, group named Untitled, and user named jsuser. - Change target_vdb, group and user to reflect values in your environment. - """ - - @classmethod - def setUpClass(cls): - super(DxAuthorizationTests, cls).setUpClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' - cls.user = 'jsuser' - cls.target_vdb = 'dx_vdb' - cls.group = 'Untitled' - cls.target_type_db = 'database' - cls.target_type_group = 'group' - cls.role_data = 'Data' - cls.role_read = 'Read' - cls.role_owner = 'OWNER' - - def test_create_authorization_group(self): - dx_authorization.create_authorization(self.server_obj, self.role_data, - self.target_type_group, - self.group, self.user) - self.assertIn('created for {}'.format(self.user), - sys.stdout.getvalue().strip()) - - def test_create_authorization_database(self): - dx_authorization.create_authorization(self.server_obj, self.role_data, - self.target_type_db, - self.target_vdb, self.user) - self.assertIn('created for {}'.format(self.user), - sys.stdout.getvalue().strip()) - - def test_lists_dx_authorizations(self): - dx_authorization.list_authorization(self.server_obj) - self.assertIn('sysadmin', sys.stdout.getvalue().strip()) - - @classmethod - def tearDownClass(cls): - super(DxAuthorizationTests, cls).tearDownClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.user = 'jsuser' - cls.target_vdb = 'dx_vdb' - cls.group = 'Untitled' - cls.target_type_db = 'database' - cls.target_type_group = 'group' - cls.role_data = 'Data' - cls.role_read = 'Read' - cls.role_owner = 'OWNER' - dx_authorization.delete_authorization(cls.server_obj, cls.role_data, - cls.target_type_db, - cls.target_vdb, cls.user) - dx_authorization.delete_authorization(cls.server_obj, cls.role_data, - cls.target_type_group, cls.group, - cls.user) - - -# Run the test case -if __name__ == '__main__': - unittest.main(buffer=True) \ No 
newline at end of file diff --git a/tests/test_dx_delete_vdb.py b/tests/test_dx_delete_vdb.py new file mode 100644 index 0000000..9ff115e --- /dev/null +++ b/tests/test_dx_delete_vdb.py @@ -0,0 +1,86 @@ +# +# Test routine for dx_delete_vdb +# This test can be run in the following modes: +# 1: Single engine mode, by providing --d +# 2: Multi engine mode by providing --d all +# 3: Default engine mode by not providing a --d argument. +# +""" +Tests dx_delete_vdb +Example: + python3 test_dx_delete_vdb.py +""" + +VERSION = "v.0.0.001" + +import unittest + +import dx_delete_vdb + +import lib.run_job +from lib.dlpx_exceptions import DlpxObjectNotFound +from lib.get_session import GetSession + + +class DxDeleteVDBTest(unittest.TestCase): + def test_something(self): + self.assertEqual(False, False) + + @classmethod + def setUpClass(cls): + super(DxDeleteVDBTest, cls).setUpClass() + config_path = "../config/dxtools.conf" + cls.engine_name = "myve" + cls.vdb_name = "testvdb" + cls.force = False + cls.single_thread = False + cls.dx_session_obj = GetSession() + cls.dx_session_obj.get_config(config_path) + cls.engine = None + + # Test to delete a vdb that does not exist in specified engine + def test_delete_nonexisting_vdb(self): + self.engine = self.dx_session_obj.dlpx_ddps[self.engine_name] + self.dx_session_obj.dlpx_session( + self.engine["ip_address"], self.engine["username"], self.engine["password"] + ) + with self.assertRaises(DlpxObjectNotFound) as ex: + dx_delete_vdb.delete_vdb(self.dx_session_obj, self.vdb_name, self.force) + self.assertEqual(type(ex.exception), DlpxObjectNotFound) + + # Deletes vdb by name across all engines + # The vdb should exist on one or more engines. + def test_delete_vdb_allengines(self): + self.vdb_name = "vOraCRM_BRKFIX" + jobs = [] + jobs_success = True + for eo in self.dx_session_obj.dlpx_ddps: + self.engine = self.dx_session_obj.dlpx_ddps[eo] + self.dx_session_obj.dlpx_session( + self.engine["ip_address"], + self.engine["username"], + self.engine["password"], + ) + try: + jobid = dx_delete_vdb.delete_vdb( + self.dx_session_obj, self.vdb_name, self.force + ) + jobs.append(jobid) + except DlpxObjectNotFound as e: + pass + except Exception as e: + jobs_success = False + break + + for job in jobs: + jobstate = lib.run_job.find_job_state_by_jobid( + eo, self.dx_session_obj, job, 10 + ) + if jobstate == "FAILED": + jobs_success = False + break + self.assertEqual(jobs_success, True) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_dx_operation.py b/tests/test_dx_operation.py deleted file mode 100755 index 3935822..0000000 --- a/tests/test_dx_operation.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -""" -Unit tests for DVE operations -""" - -import unittest -import sys - -import dx_operations -from lib.GetSession import GetSession - - -class DxOperationsTests(unittest.TestCase): - """ - Creates, activates, lists destroys Delphix Authorizations - - Requirements: VDB named dx_vdb. - Change target_vdb to reflect values in your environment. 
- """ - - @classmethod - def setUpClass(cls): - super(DxOperationsTests, cls).setUpClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' - cls.target_vdb = 'dx_vdb' - - def test_operation_functionality(self): - operations = ['stop', 'start', 'disable', 'enable'] - for op in operations: - dx_operations.dx_obj_operation(self.server_obj, self.target_vdb, op) - self.assertIn('{} was successfully'.format(op), - sys.stdout.getvalue().strip()) - - def test_lists_dx_authorizations(self): - dx_operations.list_databases(self.server_obj) - self.assertIn(self.target_vdb, sys.stdout.getvalue().strip()) - -# Run the test case -if __name__ == '__main__': - unittest.main(module=__name__, buffer=True) \ No newline at end of file diff --git a/tests/test_dx_refresh_vdb.py b/tests/test_dx_refresh_vdb.py new file mode 100644 index 0000000..4216019 --- /dev/null +++ b/tests/test_dx_refresh_vdb.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 + +""" +Unit tests for VDB refresh +""" + +import io +import sys +import unittest + +import dx_refresh_vdb + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import selfservice +from delphixpy.v1_10_2.web import vo +from lib.dx_timeflow import DxTimeflow +from lib.get_session import GetSession + +VERSION = "0.0.0.1" + + +class DxVDBRefresh(unittest.TestCase): + """ + Refreshes a VDB or a group. + + Requirements: + """ + + @classmethod + def setUpClass(cls): + super(DxVDBRefresh, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.dlpx_session( + "172.16.98.44", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_ddps["engine_name"] = "test_engine" + cls.database_name = "12cvdb" + cls.vdb_name = "ss_te" + + def _find_ref(self, f_class, obj_name): + for obj in f_class.get_all(self.server_obj.server_session): + if obj.name == obj_name: + return obj + raise dlpx_execptions.DlpxObjectNotFound + + def test_refresh_vdb_latest(self): + print("TEST - Refresh VDB Latest") + dx_refresh_vdb.refresh_vdb(self.server_obj, self.vdb_name, "LATEST") + + +# def test_refresh_all_vdbs_latest(self): +# print('TEST - Refresh VDB Latest') +# dx_refresh_vdb.refresh_vdb(self.server_obj, self.vdb_name, 'LATEST') + +# def test_refresh_all_vdbs(self): +# print('TEST - Refresh all VDBs') +# create_ref = dx_refresh_vdb.refresh_vdb( +# self.server_obj, self.template_name, self.database_name) +# self.assertIsInstance(create_ref, str) +# ss_template.delete_template(self.server_obj, self.template_name) + +# def test_list_timeflows(self): +# msg = io.StringIO() +# sys.stdout = msg +# ss_template.list_timeflows(self.server_obj) +# sys.stdout = sys.__stdout__ +# self.assertIn('Name, Reference, Active Branch', msg.getvalue()) + +# def test_list_snapshots(self): +# msg = io.StringIO() +# sys.stdout = msg +# ss_template.list_snapshots(self.server_obj) +# sys.stdout = sys.__stdout__ +# self.assertIn('Name, Reference, Active Branch', msg.getvalue()) + +# Run the test case +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_js_bookmarks.py b/tests/test_js_bookmarks.py index a126a80..100c68e 100755 --- a/tests/test_js_bookmarks.py +++ b/tests/test_js_bookmarks.py @@ -4,13 +4,15 @@ Unit tests for Jet Stream delphixpy """ -import unittest import sys +import unittest import js_bookmark + from lib.GetSession import GetSession -VERSION = '0.0.0.1' +VERSION = "0.0.0.1" + class 
JetStreamBookmarkTests(unittest.TestCase): """ @@ -24,40 +26,47 @@ class JetStreamBookmarkTests(unittest.TestCase): def setUpClass(cls): super(JetStreamBookmarkTests, cls).setUpClass() cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' - cls.data_layout = 'jscontainer' - cls.branch_name = 'default' - cls.bookmark_name = 'js_test_bookmark' - js_bookmark.create_bookmark(cls.server_obj, cls.bookmark_name, - cls.data_layout, cls.branch_name) + cls.server_obj.serversess( + "172.16.169.146", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_engines["engine_name"] = "test_engine" + cls.data_layout = "jscontainer" + cls.branch_name = "default" + cls.bookmark_name = "js_test_bookmark" + js_bookmark.create_bookmark( + cls.server_obj, cls.bookmark_name, cls.data_layout, cls.branch_name + ) def test_unshares_js_bookmark(self): js_bookmark.unshare_bookmark(self.server_obj, self.bookmark_name) - self.assertIn('{} was unshared'.format(self.bookmark_name), - sys.stdout.getvalue().strip()) + self.assertIn( + "{} was unshared".format(self.bookmark_name), sys.stdout.getvalue().strip() + ) def test_shares_js_bookmark(self): js_bookmark.share_bookmark(self.server_obj, self.bookmark_name) - self.assertIn('{} was shared'.format(self.bookmark_name), - sys.stdout.getvalue().strip()) + self.assertIn( + "{} was shared".format(self.bookmark_name), sys.stdout.getvalue().strip() + ) def test_lists_js_bookmarks(self): js_bookmark.list_bookmarks(self.server_obj) - self.assertIn('Name, Reference, Branch'.format(self.bookmark_name), - sys.stdout.getvalue().strip()) + self.assertIn( + "Name, Reference, Branch".format(self.bookmark_name), + sys.stdout.getvalue().strip(), + ) @classmethod def tearDownClass(cls): super(JetStreamBookmarkTests, cls).tearDownClass() cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.bookmark_name = 'js_test_bookmark' + cls.server_obj.serversess( + "172.16.169.146", "delphix_admin", "delphix", "DOMAIN" + ) + cls.bookmark_name = "js_test_bookmark" js_bookmark.delete_bookmark(cls.server_obj, cls.bookmark_name) # Run the test case -if __name__ == '__main__': - unittest.main(module=__name__, buffer=True) \ No newline at end of file +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_js_branches.py b/tests/test_js_branches.py deleted file mode 100755 index e306b27..0000000 --- a/tests/test_js_branches.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -""" -Unit tests for Jet Stream delphixpy -""" - -import unittest -import sys - -import js_branch -import js_template -import js_container -from lib.GetSession import GetSession - - -class JetStreamBranchTests(unittest.TestCase): - """ - Creates, activates, lists destroys JS Branches - - Requirements: Parent VDB named jst3, and child VDB named jst3_cld. - Change template_db and database_name to reflect values in your environment. 
- """ - - @classmethod - def setUpClass(cls): - super(JetStreamBranchTests, cls).setUpClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' - cls.container_name = 'js_test_container0001' - cls.branch_name = 'js_test_branch0001' - cls.template_name = 'js_test_template0001' - cls.template_db = 'jst3' - cls.database_name = 'jst3_cld' - js_template.create_template(cls.server_obj, cls.template_name, - cls.template_db) - js_container.create_container(cls.server_obj, cls.template_name, - cls.container_name, cls.database_name) - js_branch.create_branch(cls.server_obj, cls.branch_name, - cls.template_name, cls.container_name) - - def test_activate_js_branch(self): - original_branch = 'default' - js_branch.activate_branch(self.server_obj, original_branch) - self.assertIn(original_branch, sys.stdout.getvalue().strip()) - - def test_lists_js_branches(self): - js_branch.list_branches(self.server_obj) - self.assertIn('Branch Name, Data Layout'.format(self.branch_name), - sys.stdout.getvalue().strip()) - - @classmethod - def tearDownClass(cls): - super(JetStreamBranchTests, cls).tearDownClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.branch_name = 'js_test_branch0001' - cls.container_name = 'js_test_container0001' - cls.template_name = 'js_test_template0001' - js_branch.delete_branch(cls.server_obj, cls.branch_name) - js_container.delete_container(cls.server_obj, cls.container_name, True) - js_template.delete_template(cls.server_obj, cls.template_name) - - -# Run the test case -if __name__ == '__main__': - unittest.main(buffer=True) diff --git a/tests/test_js_containers.py b/tests/test_js_containers.py deleted file mode 100755 index eb24189..0000000 --- a/tests/test_js_containers.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python - -""" -Unit tests for Jet Stream delphixpy -""" - -import unittest -import sys - -import js_container -import js_template -import js_bookmark -from lib.GetSession import GetSession - - -class JetStreamContainerTests(unittest.TestCase): - """ - Creates, lists, adds/removes users to JS Containers. - - Requirements: Parent VDB named jst3, child VDB named jst3_cld and a - user named jsuser. - Change template_db, database_name and owner_name to reflect values in - your environment. 
- """ - - @classmethod - def setUpClass(cls): - super(JetStreamContainerTests, cls).setUpClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' - cls.container_name = 'js_test_container0001' - cls.database_name = 'jst3_cld' - cls.template_db = 'jst3' - cls.bookmark_name = 'js_test_bookmark0001' - cls.template_name = 'js_test_template0001' - - js_template.create_template(cls.server_obj, cls.template_name, - cls.template_db) - js_container.create_container(cls.server_obj, cls.template_name, - cls.container_name, cls.database_name) - js_bookmark.create_bookmark(cls.server_obj, cls.bookmark_name, - cls.template_name) - - def test_adds_removes_js_user(self): - owner_name = 'jsuser' - js_container.add_owner(self.server_obj, owner_name, - self.container_name) - self.assertIn(owner_name, sys.stdout.getvalue().strip()) - - js_container.remove_owner(self.server_obj, owner_name, - self.container_name) - self.assertIn(owner_name, sys.stdout.getvalue().strip()) - - def test_lists_js_containers(self): - js_container.list_containers(self.server_obj) - self.assertIn(self.container_name, sys.stdout.getvalue().strip()) - - def test_lists_hierarchy_js_containers(self): - js_container.list_hierarchy(self.server_obj, self.container_name) - self.assertIn(self.database_name, sys.stdout.getvalue().strip()) - - def test_refreshes_js_containers(self): - js_container.refresh_container(self.server_obj, self.container_name) - self.assertIn(self.container_name, sys.stdout.getvalue().strip()) - - def test_restore_js_container_to_bookmark(self): - js_container.restore_container(self.server_obj, self.container_name, - self.bookmark_name) - self.assertIn(self.container_name, sys.stdout.getvalue().strip()) - - @classmethod - def tearDownClass(cls): - super(JetStreamContainerTests, cls).tearDownClass() - cls.server_obj = GetSession() - cls.container_name = 'js_test_container0001' - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.container_name = 'js_test_container0001' - cls.template_name = 'js_test_template0001' - js_container.delete_container(cls.server_obj, cls.container_name, True) - js_template.delete_template(cls.server_obj, cls.template_name) - - - -# Run the test case -if __name__ == '__main__': - unittest.main(buffer=True) \ No newline at end of file diff --git a/tests/test_js_templates.py b/tests/test_js_templates.py deleted file mode 100755 index b38094a..0000000 --- a/tests/test_js_templates.py +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env python - -""" -Unit tests for Jet Stream delphixpy -""" - -import unittest -import sys - -import js_template -from lib.GetSession import GetSession - -VERSION = '0.0.0.015' - -class JetStreamTemplateTests(unittest.TestCase): - """ - Creates, lists, deletes JS Templates - - Requirements: Parent VDB named jst3. - Change database_name to reflect values in your environment. 
- """ - - @classmethod - def setUpClass(cls): - super(JetStreamTemplateTests, cls).setUpClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.server_obj.dlpx_engines['engine_name'] = 'test_engine' - cls.database_name = 'jst3' - cls.template_name = 'js_test_template0001' - js_template.create_template(cls.server_obj, cls.template_name, - cls.database_name) - - def test_lists_js_templates(self): - js_template.list_templates(self.server_obj) - self.assertIn(self.template_name, sys.stdout.getvalue().strip()) - - @classmethod - def tearDownClass(cls): - super(JetStreamTemplateTests, cls).tearDownClass() - cls.server_obj = GetSession() - cls.server_obj.serversess('172.16.169.146', 'delphix_admin', - 'delphix', 'DOMAIN') - cls.template_name = 'js_test_template0001' - js_template.delete_template(cls.server_obj, cls.template_name) - - -# Run the test case -if __name__ == '__main__': - unittest.main(module=__name__, buffer=True) diff --git a/tests/test_lib_dxtimeflow.py b/tests/test_lib_dxtimeflow.py new file mode 100755 index 0000000..77909d5 --- /dev/null +++ b/tests/test_lib_dxtimeflow.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python3 + +""" +Unit tests for DPP Timeflows +""" + +import sys +import types +import unittest +from io import StringIO + +from delphixpy.v1_10_2.web import database +from delphixpy.v1_10_2.web import objects +from delphixpy.v1_10_2.web import snapshot +from delphixpy.v1_10_2.web import timeflow +from delphixpy.v1_10_2.web import vo +from lib import dlpx_exceptions +from lib import get_references +from lib.dx_timeflow import DxTimeflow +from lib.get_session import GetSession + +VERSION = "v.0.3.001" + + +class DxTimeflowTests(unittest.TestCase): + """ + Unit tests for Timeflow + + Requirements: customize the settings under the setUpClass method. 
+ """ + + @classmethod + def setUpClass(cls): + super(DxTimeflowTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.dlpx_session( + "172.16.98.44", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_ddps["engine_name"] = "test_engine" + cls.dxdb = "classic" + cls.vdb = "12cvbd" + cls.tf_bookmark = "test_tm_create" + cls.tf_params = vo.TimeflowBookmarkCreateParameters() + cls.tf_params.timeflow_point = vo.OracleTimeflowPoint() + cls.tf_obj = DxTimeflow(cls.server_obj.server_session) + + def _find_obj(self, f_class, obj_name): + for obj in f_class.get_all(self.server_obj.server_session): + if obj.name == obj_name: + return obj + raise dlpx_exceptions.DlpxObjectNotFound(f"Didn't find {obj_name}") + + def _find_obj_by_ref(self, f_class, reference): + return f_class.get(self.server_obj.server_session, reference) + raise dlpx_exceptions.DlpxObjectNotFound(f"Didn't find {reference}") + + def _find_snapshots(self, vdb_ref): + return snapshot.get_all(self.server_obj.server_session, database=vdb_ref)[0] + raise dlpx_exceptions.DlpxObjectNotFound(f"Didn't find {obj_name}") + + def _create_tf_bookmark(self, bookmark_name): + self.tf_params.name = bookmark_name + vdb_obj = self._find_obj(database, self.dxdb) + snapshot_obj = self._find_snapshots(vdb_obj.reference) + self.tf_params.timeflow_point.timeflow = vdb_obj.current_timeflow + self.tf_params.timeflow_point.timestamp = ( + snapshot_obj.latest_change_point.timestamp + ) + timeflow.bookmark.create(self.server_obj.server_session, self.tf_params) + + def test_get_timeflow_reference(self): + print("TEST - Get timeflow reference") + tf_ref = self.tf_obj.get_timeflow_reference(self.dxdb) + self.assertIn("TIMEFLOW", tf_ref) + + def test_find_snapshots(self): + print("TEST - Find snapshots") + vdb_obj = self._find_obj(database, self.vdb) + snapshot_obj = self._find_snapshots(vdb_obj.reference) + self.assertIsInstance(self.tf_obj.find_snapshot(snapshot_obj.name), str) + + def test_list_timeflows(self): + print("TEST - List Timeflows and locations") + tf_timeflow_objs = self.tf_obj.list_timeflows() + self.assertIsInstance(tf_timeflow_objs, types.GeneratorType) + + # def test_create_and_delete_bookmark_by_timestamp(self): + # print('TEST - Create TF Bookmark by timestamp') + # bm_name = self.tf_bookmark + "_ts" + # vdb_obj = self._find_obj(database, self.dxdb) + # snapshot_obj = self._find_snapshots(vdb_obj.reference) + # self.tf_obj.create_bookmark(bm_name, self.dxdb, + # snapshot_obj.latest_change_point.timestamp) + # self.tf_obj.delete_bookmark(bm_name) + # + # def test_create_and_delete_bookmark_by_location(self): + # print('TEST - Create TF Bookmark by location') + # bm_name = self.tf_bookmark + '_loc' + # vdb_obj = self._find_obj(database, self.dxdb) + # snapshot_obj = self._find_snapshots(vdb_obj.reference) + # self.tf_obj.create_bookmark( + # bm_name, self.dxdb, + # location=snapshot_obj.latest_change_point.location) + # self.tf_obj.delete_bookmark(bm_name) + + def test_list_tf_bookmarks(self): + print("TEST - Get timeflow bookmarks") + tf_bookmark_objs = self.tf_obj.list_tf_bookmarks() + self.assertIsInstance(tf_bookmark_objs, types.GeneratorType) + + def test_get_timeflow_reference(self): + print("TEST - Get timeflow reference") + tf_ref = self.tf_obj.get_timeflow_reference(self.dxdb) + self.assertIsInstance(tf_ref, str) + + # def test_refresh_vdb_tf_bookmark(self): + # print('TEST - Refresh VDB from TF Bookmark') + # bm_name = 'test_bookmark' + # self._create_tf_bookmark(bm_name) + # 
self.tf_obj.refresh_vdb_tf_bookmark(self.vdb, bm_name) + # self.tf_obj.delete_bookmark(bm_name) + + def test_set_timeflow_point_snapshot_latest_point(self): + print("TEST - Set Timeflow Point by Snapshot") + vdb_obj = get_references.find_obj_by_name( + self.server_obj.server_session, database.database, self.vdb + ) + tfp_snap = self.tf_obj.set_timeflow_point(vdb_obj, "snapshot") + self.assertIsInstance( + tfp_snap, objects.TimeflowPointSemantic.TimeflowPointSemantic + ) + + def test_set_timeflow_point_snapshot_latest_point(self): + print("TEST - Set Timeflow Point by Snapshot") + vdb_obj = get_references.find_obj_by_name( + self.server_obj.server_session, database, self.vdb + ) + tfp_snap = self.tf_obj.set_timeflow_point(vdb_obj, "snapshot") + self.assertIsInstance( + tfp_snap, objects.TimeflowPointSemantic.TimeflowPointSemantic + ) + + # def test_set_timeflow_point_named_snapshot(self): + # print('TEST - Set Timeflow Point by Snapshot') + # vdb_obj = self._find_obj(database, self.vdb) + # snapshot_obj = self._find_snapshots(vdb_obj.reference) + # vdb_obj = get_references.find_obj_by_name( + # self.server_obj.server_session, database, self.vdb) + # tfp_snap = self.tf_obj.set_timeflow_point( + # vdb_obj, 'snapshot', timestamp=snapshot_obj.name) + # self.assertIsInstance( + # tfp_snap, objects.TimeflowPointSnapshot.TimeflowPointSnapshot) + + def test_set_timeflow_point_by_time_timeflow_name(self): + print("TEST - Set timeflow point by time and timeflow name") + vdb_obj = self._find_obj(database, self.vdb) + snapshot_obj = self._find_snapshots(vdb_obj.reference) + tf_obj = self._find_obj_by_ref(timeflow, snapshot_obj.timeflow) + tfp_snap = self.tf_obj.set_timeflow_point( + vdb_obj, "time", timestamp=snapshot_obj.name, timeflow_name=tf_obj.name + ) + self.assertIsInstance( + tfp_snap, objects.TimeflowPointTimestamp.TimeflowPointTimestamp + ) + + def test_set_timeflow_point_time_latest_point(self): + print("TEST - Set timeflow point by time latest point") + vdb_obj = get_references.find_obj_by_name( + self.server_obj.server_session, database.database, self.vdb + ) + tfp_snap = self.tf_obj.set_timeflow_point(vdb_obj, "time") + self.assertIsInstance( + tfp_snap, objects.TimeflowPointSemantic.TimeflowPointSemantic + ) + + +# Run the test case +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_lib_get_references.py b/tests/test_lib_get_references.py new file mode 100755 index 0000000..ec6291b --- /dev/null +++ b/tests/test_lib_get_references.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python3 + +""" +Unit tests for DPP Timeflows +""" + +import sys +import types +import unittest +from io import StringIO + +from delphixpy.v1_10_2 import web +from lib import get_references +from lib.get_session import GetSession + + +class GetReferencesTests(unittest.TestCase): + """ + Unit tests for the GetReferences class + + Requirements: Customize variables under the setUpClass() method. 
+ """ + + @classmethod + def setUpClass(cls): + super(GetReferencesTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.dlpx_session( + "172.16.98.44", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_ddps["engine_name"] = "test_engine" + cls.zulu_timestamp = "2018-08-24T19:14:14" + cls.db_name = "classic" + cls.db_reference = "ORACLE_DB_CONTAINER-508" + cls.env_name = "Masking Source" + cls.child_vdb = "Vdlp_112" + cls.env_ref = "UNIX_HOST_ENVIRONMENT-18" + cls.install_home = "/u01/app/oracle/product/11.2.0.4/dbhome_1" + cls.sourceconfig_name = "Vdlpxdb1_112" + cls.group_name = "Sources" + + def test_convert_timestamp(self): + print("TEST - Convert Zulu timezone into DDP timezone") + local_tz = get_references.convert_timestamp( + self.server_obj.server_session, self.zulu_timestamp + ) + self.assertIsInstance(local_tz, str) + + def test_get_running_job(self): + # This test requires a running job, otherwise it will fail + print("TEST - Get Running Job") + obj_ref = get_references.get_running_job( + self.server_obj.server_session, self.db_reference + ) + self.assertIsInstance(obj_ref, str) + + def test_find_obj_by_name(self): + print("TEST - Find Object by Name") + obj_ref = get_references.find_obj_by_name( + self.server_obj.server_session, web.environment, self.env_name + ) + self.assertIsInstance( + obj_ref, web.objects.UnixHostEnvironment.UnixHostEnvironment + ) + + def test_find_source_by_db_name(self): + print("TEST - Find source by database name") + src_obj = get_references.find_source_by_db_name( + self.server_obj.server_session, self.child_vdb + ) + self.assertIsInstance( + src_obj, web.objects.OracleVirtualSource.OracleVirtualSource + ) + + def test_find_obj_name(self): + print("TEST - Find object name from reference") + obj_name = get_references.find_obj_name( + self.server_obj.server_session, web.database, self.db_reference + ) + self.assertIsInstance(obj_name, str) + + def test_find_db_repo(self): + print("TEST - Find database repository") + db_repo_name = get_references.find_db_repo( + self.server_obj.server_session, + "OracleInstall", + self.env_ref, + self.install_home, + ) + self.assertIsInstance(db_repo_name, web.objects.OracleInstall.OracleInstall) + + def test_find_sourceconfig(self): + print("TEST - Find Sourceconfig") + src_name = get_references.find_sourceconfig( + self.server_obj.server_session, self.sourceconfig_name, self.env_ref + ) + self.assertIsInstance(src_name, web.objects.OracleSIConfig.OracleSIConfig) + + def test_find_all_databases_by_group_name(self): + print("TEST - Find all databases by group name") + obj_lst = get_references.find_all_databases_by_group_name( + self.server_obj.server_session, self.group_name + ) + self.assertIsInstance(obj_lst, list) + + +# Run the test case +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_lib_get_session.py b/tests/test_lib_get_session.py new file mode 100755 index 0000000..78d8cc5 --- /dev/null +++ b/tests/test_lib_get_session.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python3 + +""" +Unit tests for DPP GetSession +""" + +import os +import ssl +import sys +import types +import unittest +from io import StringIO + +from delphixpy.v1_10_2.web import database +from lib import get_references +from lib.get_session import GetSession + + +class GetSessionTests(unittest.TestCase): + """ + Unit test for GetSession + + Requirements: Customize variables under the setUpClass() method. 
+ """ + + @classmethod + def setUpClass(cls): + super(GetSessionTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.engine_ip = "172.16.98.44" + cls.engine_user = "delphix_admin" + cls.engine_pass = "delphix" + + def test_get_config(self): + print("TEST - Get config") + self.server_obj.get_config() + self.assertNotEqual(0, len(self.server_obj.dlpx_ddps)) + + def test_server_session(self): + print("TEST - Server session") + for engine in self.server_obj.dlpx_ddps.keys(): + ddps_dct = self.server_obj.dlpx_ddps[engine].pop() + self.server_obj.dlpx_session( + ddps_dct["ip_address"], + ddps_dct["username"], + ddps_dct["password"], + ddps_dct["use_https"], + ) + + def test_job_mode_sync(self): + print("TEST - Job mode sync") + self.server_obj.dlpx_session( + self.engine_ip, self.engine_user, self.engine_pass, "DOMAIN" + ) + with self.server_obj.job_mode(): + database.get_all(self.server_obj.server_session) + + def test_job_mode_async(self): + print("TEST - Job mode async") + self.server_obj.dlpx_session( + self.engine_ip, self.engine_user, self.engine_pass, "DOMAIN" + ) + with self.server_obj.job_mode(False): + database.get_all(self.server_obj.server_session) + + def test_server_wait(self): + print("TEST - Server wait") + self.server_obj.dlpx_session( + self.engine_ip, self.engine_user, self.engine_pass, "DOMAIN" + ) + self.server_obj.server_wait() + + +# Run the unit tests +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_lib_run_job.py b/tests/test_lib_run_job.py new file mode 100755 index 0000000..e290826 --- /dev/null +++ b/tests/test_lib_run_job.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python3 + +""" +Unit tests for DPP run_job +""" + +import os +import ssl +import sys +import types +import unittest +from io import StringIO + +from delphixpy.v1_10_2 import web +from lib.dx_timeflow import DxTimeflow +from lib.get_session import GetSession +from lib.run_job import run_job + + +class RunJob(unittest.TestCase): + """ + Unit tests for the RunJob module + + Requirements: Customize variables under the setUpClass() method. 
+ """ + + @classmethod + def setUpClass(cls): + super(RunJob, cls).setUpClass() + if not os.environ.get("PYTHONHTTPSVERIFY", "") and getattr( + ssl, "_create_unverified_context", None + ): + ssl._create_default_https_context = ssl._create_unverified_context + cls.server_obj = GetSession() + cls.server_obj.dlpx_session( + "172.16.98.44", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_ddps["engine_name"] = "test_engine" + + def test_run_job_all(self): + print("TEST - Run Job All") + ret_val = run_job(main_func, self.server_obj) + self.assertIsInstance(ret_val, types.GeneratorType) + + def test_run_job_default_ddp(self): + print("TEST - Run Job default DDP") + ret_val = run_job(main_func, self.server_obj, "default") + self.assertIsInstance(ret_val, types.GeneratorType) + + def test_run_job_named_ddp(self): + print("TEST - Run Job named DDP") + ret_val = run_job(main_func, self.server_obj, "landshark") + self.assertIsInstance(ret_val, types.GeneratorType) + + +def main_func(var): + import time + + print(f"var passed: {var}") + time.sleep(5) + + +# Run the test case +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_ss_bookmark.py b/tests/test_ss_bookmark.py new file mode 100755 index 0000000..1b7b35e --- /dev/null +++ b/tests/test_ss_bookmark.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 + +""" +Unit tests for Self Service bookmark +""" + +import io +import sys +import unittest + +import ss_bookmark + +from delphixpy.v1_10_2.web import selfservice +from delphixpy.v1_10_2.web import vo +from lib.dx_timeflow import DxTimeflow +from lib.get_session import GetSession + +VERSION = "0.0.0.1" + + +class SelfServiceBookmarkTests(unittest.TestCase): + """ + Creates, lists, shares/unshares SS Bookmarks. + + Requirements: data_layout named sstemplate3 exists on the engine. + Change data_layout to reflect values in your environment. 
+ """ + + @classmethod + def setUpClass(cls): + super(SelfServiceBookmarkTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.dlpx_session( + "172.16.98.44", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_ddps["engine_name"] = "test_engine" + cls.data_layout = "ss_data_pod" + cls.branch_name = "default" + cls.bookmark_name = "ss_test_bookmark" + + def _find_ref(self, f_class, obj_name): + for obj in f_class.get_all(self.server_obj.server_session): + if obj.name == obj_name: + return obj + + def test_create_ss_bookmark(self): + print("TEST - Create SS Bookmark") + create_ref = ss_bookmark.create_bookmark( + self.server_obj, self.bookmark_name, self.data_layout + ) + self.assertIsInstance(create_ref, str) + selfservice.bookmark.delete(self.server_obj.server_session, create_ref) + + def test_create_ss_bookmark_with_branch(self): + print("TEST - Create SS Bookmark with branch") + create_ref = ss_bookmark.create_bookmark( + self.server_obj, self.bookmark_name, self.data_layout, self.branch_name + ) + self.assertIsInstance(create_ref, str) + selfservice.bookmark.delete(self.server_obj.server_session, create_ref) + + def test_create_ss_bookmark_with_tags(self): + print("TEST - Create SS Bookmark with tags") + tags = "version 123, break fix, delphix" + create_ref = ss_bookmark.create_bookmark( + self.server_obj, self.bookmark_name, self.data_layout, None, tags + ) + self.assertIsInstance(create_ref, str) + selfservice.bookmark.delete(self.server_obj.server_session, create_ref) + + def test_create_ss_bookmark_with_description(self): + print("TEST - Create SS Bookmark with description") + description = "unit testing - ss bookmark" + create_ref = ss_bookmark.create_bookmark( + self.server_obj, + self.bookmark_name, + self.data_layout, + None, + None, + description, + ) + self.assertIsInstance(create_ref, str) + selfservice.bookmark.delete(self.server_obj.server_session, create_ref) + + def test_list_ss_bookmarks(self): + msg = io.StringIO() + sys.stdout = msg + ss_bookmark.list_bookmarks(self.server_obj) + sys.stdout = sys.__stdout__ + self.assertIn("Name, Reference, Branch", msg.getvalue()) + + def test_unshare_ss_bookmark(self): + msg = io.StringIO() + sys.stdout = msg + create_params = vo.JSBookmarkCreateParameters() + create_params.bookmark = vo.JSBookmark() + create_params.timeline_point_parameters = vo.JSTimelinePointLatestTimeInput() + data_layout_obj = self._find_ref(selfservice.container, self.data_layout) + create_params.bookmark.branch = data_layout_obj.active_branch + create_params.bookmark.name = self.bookmark_name + create_params.timeline_point_parameters.source_data_layout = ( + data_layout_obj.reference + ) + create_ref = selfservice.bookmark.create( + self.server_obj.server_session, create_params + ) + ss_bookmark.share_bookmark(self.server_obj, self.bookmark_name) + ss_bookmark.unshare_bookmark(self.server_obj, self.bookmark_name) + sys.stdout = sys.__stdout__ + self.assertIn(f"{self.bookmark_name} was unshared", msg.getvalue()) + selfservice.bookmark.delete(self.server_obj.server_session, create_ref) + + def test_share_ss_bookmark(self): + msg = io.StringIO() + sys.stdout = msg + create_params = vo.JSBookmarkCreateParameters() + create_params.bookmark = vo.JSBookmark() + create_params.timeline_point_parameters = vo.JSTimelinePointLatestTimeInput() + data_layout_obj = self._find_ref(selfservice.template, self.data_layout) + create_params.bookmark.branch = data_layout_obj.active_branch + create_params.bookmark.name = self.bookmark_name + 
create_params.timeline_point_parameters.source_data_layout = ( + data_layout_obj.reference + ) + create_ref = selfservice.bookmark.create( + self.server_obj.server_session, create_params + ) + ss_bookmark.share_bookmark(self.server_obj, self.bookmark_name) + sys.stdout = sys.__stdout__ + self.assertIn(f"{self.bookmark_name} was shared", msg.getvalue()) + selfservice.bookmark.delete(self.server_obj.server_session, create_ref) + + +# Run the test case +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_ss_bookmarks.py b/tests/test_ss_bookmarks.py new file mode 100755 index 0000000..100c68e --- /dev/null +++ b/tests/test_ss_bookmarks.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +""" +Unit tests for Jet Stream delphixpy +""" + +import sys +import unittest + +import js_bookmark + +from lib.GetSession import GetSession + +VERSION = "0.0.0.1" + + +class JetStreamBookmarkTests(unittest.TestCase): + """ + Creates, lists, shares/unshares JS Bookmarks. + + Requirements: data_layout named jstemplate3 exists on the engine. + Change data_layout to reflect values in your environment. + """ + + @classmethod + def setUpClass(cls): + super(JetStreamBookmarkTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.serversess( + "172.16.169.146", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_engines["engine_name"] = "test_engine" + cls.data_layout = "jscontainer" + cls.branch_name = "default" + cls.bookmark_name = "js_test_bookmark" + js_bookmark.create_bookmark( + cls.server_obj, cls.bookmark_name, cls.data_layout, cls.branch_name + ) + + def test_unshares_js_bookmark(self): + js_bookmark.unshare_bookmark(self.server_obj, self.bookmark_name) + self.assertIn( + "{} was unshared".format(self.bookmark_name), sys.stdout.getvalue().strip() + ) + + def test_shares_js_bookmark(self): + js_bookmark.share_bookmark(self.server_obj, self.bookmark_name) + self.assertIn( + "{} was shared".format(self.bookmark_name), sys.stdout.getvalue().strip() + ) + + def test_lists_js_bookmarks(self): + js_bookmark.list_bookmarks(self.server_obj) + self.assertIn( + "Name, Reference, Branch".format(self.bookmark_name), + sys.stdout.getvalue().strip(), + ) + + @classmethod + def tearDownClass(cls): + super(JetStreamBookmarkTests, cls).tearDownClass() + cls.server_obj = GetSession() + cls.server_obj.serversess( + "172.16.169.146", "delphix_admin", "delphix", "DOMAIN" + ) + cls.bookmark_name = "js_test_bookmark" + js_bookmark.delete_bookmark(cls.server_obj, cls.bookmark_name) + + +# Run the test case +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_ss_container.py b/tests/test_ss_container.py new file mode 100755 index 0000000..e53ea6f --- /dev/null +++ b/tests/test_ss_container.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +""" +Unit tests for Self Service container +""" + +import io +import sys +import unittest + +import ss_container + +from delphixpy.v1_10_2 import exceptions +from delphixpy.v1_10_2.web import selfservice +from delphixpy.v1_10_2.web import vo +from lib.dx_timeflow import DxTimeflow +from lib.get_session import GetSession + +VERSION = "0.0.0.1" + + +class SelfServiceContainerTests(unittest.TestCase): + """ + Creates, deletes, lists, restores SS Containers. + + Requirements: data_template named ss_data_template exists on the engine. + Change data_template to reflect values in your environment. 
+ """ + + @classmethod + def setUpClass(cls): + super(SelfServiceContainerTests, cls).setUpClass() + cls.server_obj = GetSession() + cls.server_obj.dlpx_session( + "172.16.98.44", "delphix_admin", "delphix", "DOMAIN" + ) + cls.server_obj.dlpx_ddps["engine_name"] = "test_engine" + cls.data_template = "ss_data_template" + cls.container_name = "ss_data_pod" + cls.database_name = "ss_te" + cls.owner_name = "dev" + cls.bookmark_name = "ss_bookmark" + + def _find_ref(self, f_class, obj_name): + for obj in f_class.get_all(self.server_obj.server_session): + if obj.name == obj_name: + return obj + raise dlpx_execptions.DlpxObjectNotFound + + def _create_container(self): + return ss_container.create_container( + self.server_obj, self.data_template, self.container_name, self.database_name + ) + + def _delete_container(self, create_ref): + delete_params = vo.JSDataContainerDeleteParameters() + delete_params.delete_data_sources = False + selfservice.container.delete( + self.server_obj.server_session, create_ref, delete_params + ) + + def test_create_add_remove_users_ss_container(self): + print("TEST - Create, add/remove users in an SS Container") + create_ref = ss_container.create_container( + self.server_obj, self.data_template, self.container_name, self.database_name + ) + self.assertIsInstance(create_ref, str) + ss_container.add_owner(self.server_obj, self.owner_name, self.container_name) + ss_container.remove_owner(self.server_obj, self.owner_name, self.container_name) + self._delete_container(create_ref) + + def test_refresh_container(self): + print("TEST - Refresh Container") + create_ref = ss_container.create_container( + self.server_obj, self.data_template, self.container_name, self.database_name + ) + self.assertIsInstance(create_ref, str) + ss_container.refresh_container(self.server_obj, self.container_name) + self._delete_container(create_ref) + + def test_restore_and_reset_container(self): + create_ref = self._create_container() + data_layout_obj = self._find_ref(selfservice.container, self.container_name) + ss_bookmark_params = vo.JSBookmarkCreateParameters() + ss_bookmark_params.bookmark = vo.JSBookmark() + ss_bookmark_params.bookmark.name = self.bookmark_name + ss_bookmark_params.bookmark.branch = data_layout_obj.active_branch + ss_bookmark_params.timeline_point_parameters = ( + vo.JSTimelinePointLatestTimeInput() + ) + ss_bookmark_params.timeline_point_parameters.source_data_layout = ( + data_layout_obj.reference + ) + ss_bookmark_ref = selfservice.bookmark.create( + self.server_obj.server_session, ss_bookmark_params + ) + ss_container.restore_container( + self.server_obj, self.container_name, self.bookmark_name + ) + ss_container.reset_container(self.server_obj, self.container_name) + self._delete_container(create_ref) + + def test_list_containers(self): + msg = io.StringIO() + sys.stdout = msg + ss_container.list_containers(self.server_obj) + sys.stdout = sys.__stdout__ + self.assertIn("Name, Active Branch, Owner", msg.getvalue()) + + def test_list_hierarchy_containers(self): + create_ref = self._create_container() + msg = io.StringIO() + sys.stdout = msg + ss_container.list_hierarchy(self.server_obj, self.container_name) + sys.stdout = sys.__stdout__ + self.assertIn("Related VDBs:", msg.getvalue()) + self._delete_container(create_ref) + + +# Run the test case +if __name__ == "__main__": + unittest.main(module=__name__, buffer=True) diff --git a/tests/test_ss_template.py b/tests/test_ss_template.py new file mode 100755 index 0000000..69c4374 --- /dev/null +++ 
+++ b/tests/test_ss_template.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+
+"""
+Unit tests for Self Service template
+"""
+
+import io
+import sys
+import unittest
+
+import ss_template
+
+from delphixpy.v1_10_2 import exceptions
+from delphixpy.v1_10_2.web import selfservice
+from delphixpy.v1_10_2.web import vo
+from lib import dlpx_exceptions  # assumed: lib/dlpx_exceptions provides DlpxObjectNotFound
+from lib.dx_timeflow import DxTimeflow
+from lib.get_session import GetSession
+
+VERSION = "0.0.0.1"
+
+
+class SelfServiceTemplateTests(unittest.TestCase):
+    """
+    Creates, deletes, lists, restores SS Templates.
+
+    Requirements: data_template named ss_data_template exists on the engine.
+    Change data_template to reflect values in your environment.
+    """
+
+    @classmethod
+    def setUpClass(cls):
+        super(SelfServiceTemplateTests, cls).setUpClass()
+        cls.server_obj = GetSession()
+        cls.server_obj.dlpx_session(
+            "172.16.98.44", "delphix_admin", "delphix", "DOMAIN"
+        )
+        cls.server_obj.dlpx_ddps["engine_name"] = "test_engine"
+        cls.template_name = "ss_data_pod"
+        cls.database_name = "ss_tmpl"
+        cls.owner_name = "dev"
+        cls.bookmark_name = "ss_bookmark"
+
+    def _find_ref(self, f_class, obj_name):
+        for obj in f_class.get_all(self.server_obj.server_session):
+            if obj.name == obj_name:
+                return obj
+        raise dlpx_exceptions.DlpxObjectNotFound
+
+    def test_create_and_delete_ss_template(self):
+        print("TEST - Create and Delete a SS Template")
+        create_ref = ss_template.create_template(
+            self.server_obj, self.template_name, self.database_name
+        )
+        self.assertIsInstance(create_ref, str)
+        ss_template.delete_template(self.server_obj, self.template_name)
+
+    def test_list_templates(self):
+        msg = io.StringIO()
+        sys.stdout = msg
+        ss_template.list_templates(self.server_obj)
+        sys.stdout = sys.__stdout__
+        self.assertIn("Name, Reference, Active Branch", msg.getvalue())
+
+
+# Run the test case
+if __name__ == "__main__":
+    unittest.main(module=__name__, buffer=True)
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..686c8b5
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,11 @@
+[tox]
+envlist = format
+skipsdist = True
+
+[testenv:format]
+description = Format the code base to adhere to our styles, and complain about what we cannot do automatically
+basepython = python3
+deps =
+    pre-commit
+skip_install = True
+commands = pre-commit run {posargs}
diff --git a/trigger_replication.py b/trigger_replication.py
deleted file mode 100755
index 939a2a4..0000000
--- a/trigger_replication.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-#Adam Bowen Sept 2016
-VERSION="v.0.0.002"
-#just a quick and dirty example of executing a replication profile
-
-from delphixpy.v1_6_0.delphix_engine import DelphixEngine
-from delphixpy.v1_6_0.web import replication
-from delphixpy.v1_6_0.web.vo import ReplicationSpec
-
-engine_address = "192.168.218.177"
-engine_username = "delphix_admin"
-engine_password = "landshark"
-
-replication_profile_name = "Example Replication Profile"
-
-def serversess(f_engine_address, f_engine_username, f_engine_password):
-    """
-    Function to setup the session with the Delphix Engine
-    """
-    server_session= DelphixEngine(f_engine_address, f_engine_username, f_engine_password, "DOMAIN")
-    return server_session
-
-def find_obj_by_name(server, f_class, obj_name):
-    """
-    Function to find objects by name and object class, and return object's reference as a string
-    You might use this function to find objects like groups.
- """ - print "Searching objects in the " + f_class.__name__ + " class\n for one named \"" + obj_name +"\"" - obj_ref = '' - - all_objs = f_class.get_all(server) - for obj in all_objs: - if obj.name == obj_name: - print "Found a match " + str(obj.reference) - return obj - - -server = serversess(engine_address, engine_username, engine_password) - -replication_list=replication.spec.get_all(server) - -print "##### REPLICATION LIST #######" -for obj in replication_list: - print obj.name -print "##### END REPLICATION LIST #######" - -replication_spec = find_obj_by_name(server, replication.spec, replication_profile_name) - -print "##### REPLICATION PROFILE: " + replication_profile_name +" #######" -print replication_spec.reference - -print "Executing " + replication_profile_name - -replication.spec.execute(server, replication_spec.reference) - -print replication_profile_name + " executed." \ No newline at end of file diff --git a/via_httplib.py b/via_httplib.py deleted file mode 100644 index 65647e0..0000000 --- a/via_httplib.py +++ /dev/null @@ -1,148 +0,0 @@ -#!/usr/bin/env python - -# -# Copyright (c) 2018 by Delphix. All rights reserved. -# - -import os -import sys -import json -import argparse -from argparse import RawTextHelpFormatter -import urllib -import httplib - - -SCRIPT_DESCRIPTION = """\ -Connect to Delphix engine to run some queries using the http lib library -""" - -# globals used by helper functions -dlpx_host = "" -dlpx_user = "" -dlpx_password = "" -dlpx_cookie = None -major = 1 #API Major version number -minor = 6 #API Minor version number -micro = 0 #API micro version number - -def main(): - global dlpx_host - global dlpx_user - global dlpx_password - global dlpx_cookie - - # parse args and print usage message if necessary - parser = argparse.ArgumentParser(description=SCRIPT_DESCRIPTION, formatter_class=RawTextHelpFormatter) - parser.add_argument("dlpxHost", help="The target Delphix Engine.", type=str) - parser.add_argument("dlpxUser", help="The username to use to log into the Delphix Engine.", type=str, - nargs="?", default="delphix_admin") - parser.add_argument("dlpxPassword", help="The password to use to log into the Delphix Engine.", type=str, - nargs="?", default="delphix") - args = parser.parse_args() - - # save args to variables with shorter names - dlpx_host = args.dlpxHost - dlpx_user = args.dlpxUser - dlpx_password = args.dlpxPassword - - api_version = {"type": "APIVersion", "major": major, "minor": minor, "micro": micro} - - # log into the Delphix Engine in order to set cookie - print "Logging into " + dlpx_host + "..." 
-    log_into_dlpx_engine(api_version)
-    print "SUCCESS - Logged in as " + dlpx_user
-
-    response = dlpx_get("delphix/user")
-    for item in response["result"]:
-        print(item["name"])
-    # exit with success
-    sys.exit(0)
-
-
-def check_response(response):
-    if response.status is not 200:
-        sys.stderr.write("ERROR: Expected a response of HTTP status 200 (Success) but received something different.\n")
-        sys.stderr.write("Response status: " + str(response.status) + "\n")
-        sys.stderr.write("Response reason: " + response.reason + "\n")
-        sys.exit(1)

-def dlpx_post_json(resource, payload):
-    global dlpx_host
-    global dlpx_user
-    global dlpx_password
-    global dlpx_cookie
-
-    # encode payload for request
-    data = json.dumps(payload)
-
-    # form http header, add cookie if one has been set
-    headers = { "Content-type": "application/json" }
-    if dlpx_cookie is not None:
-        headers["Cookie"] = dlpx_cookie
-
-    # issue request
-    h = httplib.HTTPConnection(dlpx_host)
-    h.request('POST', "/resources/json/" + resource, data, headers)
-    r = h.getresponse()
-    check_response(r)
-
-    # save cookie if one was received
-    if r.getheader("set-cookie", None) is not None:
-        dlpx_cookie = r.getheader("set-cookie")
-
-    # return response as parsed json
-    r_payload = r.read()
-    return json.loads(r_payload)
-
-def dlpx_get(resource, payload=None):
-    global dlpx_host
-    global dlpx_user
-    global dlpx_password
-    global dlpx_cookie
-
-
-    if payload:
-        # encode payload for request
-        data = json.dumps(payload)
-    else:
-        data = None
-
-    # form http header, add cookie if one has been set
-    headers = { "Content-type": "application/json" }
-    if dlpx_cookie is not None:
-        headers["Cookie"] = dlpx_cookie
-
-    # issue request
-    h = httplib.HTTPConnection(dlpx_host)
-    h.request('GET', "/resources/json/" + resource, data, headers)
-    r = h.getresponse()
-    check_response(r)
-
-    # save cookie if one was received
-    if r.getheader("set-cookie", None) is not None:
-        dlpx_cookie = r.getheader("set-cookie")
-
-    # return response as parsed json
-    r_payload = r.read()
-    return json.loads(r_payload)
-
-def log_into_dlpx_engine(api_version):
-    dlpx_post_json("delphix/session", {
-        "type": "APISession",
-        "version": {
-            "type": "APIVersion",
-            "major": api_version["major"],
-            "minor": api_version["minor"],
-            "micro": api_version["micro"]
-        }
-    })
-
-    dlpx_post_json("delphix/login", {
-        "type": "LoginRequest",
-        "username": dlpx_user,
-        "password": dlpx_password
-    })
-
-if __name__ == "__main__":
-    main()
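
The deleted trigger_replication.py above was Python 2 only and hard-wired to the v1_6_0 bindings. A minimal Python 3 sketch of the same flow is shown below for anyone keeping a private copy; it assumes the v1_10_2 package exposes the same DelphixEngine constructor and replication.spec get_all/execute calls that the old script used, and it reuses the old script's placeholder address, credentials, and profile name.

#!/usr/bin/env python3
# Hedged sketch only: rough Python 3 counterpart of the deleted
# trigger_replication.py, assuming replication.spec in delphixpy v1_10_2
# behaves like the v1_6_0 calls shown above. Address, credentials, and
# profile name are the placeholders from the deleted script.

from delphixpy.v1_10_2.delphix_engine import DelphixEngine
from delphixpy.v1_10_2.web import replication

ENGINE_ADDRESS = "192.168.218.177"
ENGINE_USER = "delphix_admin"
ENGINE_PASSWORD = "landshark"
PROFILE_NAME = "Example Replication Profile"


def main():
    # Open a session against the engine, then list and execute the profile.
    server = DelphixEngine(ENGINE_ADDRESS, ENGINE_USER, ENGINE_PASSWORD, "DOMAIN")
    specs = replication.spec.get_all(server)
    for spec in specs:
        print(spec.name)
    match = next((s for s in specs if s.name == PROFILE_NAME), None)
    if match is None:
        raise SystemExit(f"No replication profile named {PROFILE_NAME}")
    replication.spec.execute(server, match.reference)
    print(f"{PROFILE_NAME} executed.")


if __name__ == "__main__":
    main()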
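
via_httplib.py is likewise removed because httplib and its print statements are Python 2 only. The sketch below shows the same session/login handshake against the raw Delphix API using Python 3's standard http.client; the JSON payloads come straight from the deleted script, the host and credentials are placeholders, and a production engine should be reached over HTTPS rather than plain HTTP.

#!/usr/bin/env python3
# Hedged sketch only: the session/login handshake from the deleted
# via_httplib.py, ported to http.client. Host and credentials are
# placeholders, not values from this repository.

import http.client
import json

DLPX_HOST = "delphix-engine.example.com"  # placeholder


def post_json(conn, resource, payload, cookie=None):
    """POST a JSON payload to /resources/json/<resource>; return (body, cookie)."""
    headers = {"Content-type": "application/json"}
    if cookie:
        headers["Cookie"] = cookie
    conn.request("POST", "/resources/json/" + resource, json.dumps(payload), headers)
    response = conn.getresponse()
    body = json.loads(response.read())
    return body, response.getheader("set-cookie", cookie)


def main():
    conn = http.client.HTTPConnection(DLPX_HOST)
    # 1. Negotiate an API session (version numbers as in the deleted script).
    _, cookie = post_json(conn, "delphix/session", {
        "type": "APISession",
        "version": {"type": "APIVersion", "major": 1, "minor": 6, "micro": 0},
    })
    # 2. Log in, presenting the session cookie returned above.
    _, cookie = post_json(conn, "delphix/login", {
        "type": "LoginRequest",
        "username": "delphix_admin",  # placeholder
        "password": "delphix",        # placeholder
    }, cookie)
    print("Logged in; session cookie:", cookie)


if __name__ == "__main__":
    main()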