From 17cd93e475cd21120b629f60bc9939618f1b2b3d Mon Sep 17 00:00:00 2001
From: GBBBAS <42962356+GBBBAS@users.noreply.github.com>
Date: Wed, 28 Feb 2024 16:30:42 +0000
Subject: [PATCH] Add Deploy Stop method

Signed-off-by: GBBBAS <42962356+GBBBAS@users.noreply.github.com>
---
 .../rtdip_sdk/pipelines/deploy/databricks.py       | 18 ++++++++++++++++++
 .../rtdip_sdk/pipelines/deploy/interfaces.py       |  4 ++++
 .../pipelines/deploy/test_databricks_deploy.py     | 17 +++++++++++++++++
 3 files changed, 39 insertions(+)

diff --git a/src/sdk/python/rtdip_sdk/pipelines/deploy/databricks.py b/src/sdk/python/rtdip_sdk/pipelines/deploy/databricks.py
index 42e08b264..12f0cc9cd 100644
--- a/src/sdk/python/rtdip_sdk/pipelines/deploy/databricks.py
+++ b/src/sdk/python/rtdip_sdk/pipelines/deploy/databricks.py
@@ -275,3 +275,21 @@ def launch(self):
             raise ValueError("Job not found in Databricks Workflows")
 
         return True
+
+    def stop(self):
+        """
+        Cancels an RTDIP Pipeline Job in Databricks Workflows. This will perform the equivalent of a `Cancel All Runs` in Databricks Workflows.
+        """
+        workspace_client = WorkspaceClient(
+            host=self.host, token=self.token, auth_type="pat"
+        )
+        job_found = False
+        for existing_job in workspace_client.jobs.list(name=self.databricks_job.name):
+            workspace_client.jobs.cancel_all_runs(job_id=existing_job.job_id)
+            job_found = True
+            break
+
+        if job_found == False:
+            raise ValueError("Job not found in Databricks Workflows")
+
+        return True
diff --git a/src/sdk/python/rtdip_sdk/pipelines/deploy/interfaces.py b/src/sdk/python/rtdip_sdk/pipelines/deploy/interfaces.py
index 335e91a2b..8e1bedd04 100644
--- a/src/sdk/python/rtdip_sdk/pipelines/deploy/interfaces.py
+++ b/src/sdk/python/rtdip_sdk/pipelines/deploy/interfaces.py
@@ -23,3 +23,7 @@ def deploy(self):
     @abstractmethod
     def launch(self):
         pass
+
+    @abstractmethod
+    def stop(self):
+        pass
diff --git a/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py b/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py
index 2104461cc..1be192f9e 100644
--- a/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py
+++ b/tests/sdk/python/rtdip_sdk/pipelines/deploy/test_databricks_deploy.py
@@ -65,6 +65,9 @@ def reset(self, job_id=None, new_settings=None):
     def run_now(self, job_id=None):
         return None
 
+    def cancel_all_runs(self, job_id=None):
+        return None
+
     def list(self, name=None, job_id=None):
         return [self]
 
@@ -166,6 +169,13 @@ def test_pipeline_job_deploy(mocker: MockerFixture):
     launch_result = databricks_job.launch()
     assert launch_result
 
+    mocker.patch(default_list_package, return_value=[DummyJob()])
+    mocker.patch(
+        "databricks.sdk.service.jobs.JobsAPI.cancel_all_runs", return_value=None
+    )
+    stop_result = databricks_job.stop()
+    assert stop_result
+
 
 def test_pipeline_job_deploy_fails(mocker: MockerFixture):
     cluster_list = []
@@ -233,3 +243,10 @@ def test_pipeline_job_deploy_fails(mocker: MockerFixture):
     mocker.patch("databricks.sdk.service.jobs.JobsAPI.run_now", side_effect=Exception)
     with pytest.raises(Exception):
        databricks_job.launch()
+
+    mocker.patch(default_list_package, return_value=[DummyJob()])
+    mocker.patch(
+        "databricks.sdk.service.jobs.JobsAPI.cancel_all_runs", side_effect=Exception
+    )
+    with pytest.raises(Exception):
+        databricks_job.stop()
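
Usage note (not part of the patch): a minimal sketch of how the new stop() method might be called. The DatabricksSDKDeploy class name, the CreateJob import, the constructor keyword arguments, and the cancel_pipeline_job helper below are assumptions inferred from the attributes the diff references (self.databricks_job, self.host, self.token); check the SDK for the exact public names and signature.

    # Hypothetical usage sketch for the new stop() method; names and signature
    # are assumed from the attributes used in the diff, not confirmed API.
    from rtdip_sdk.pipelines.deploy import CreateJob, DatabricksSDKDeploy


    def cancel_pipeline_job(job: CreateJob, host: str, token: str) -> bool:
        """Cancel all runs of a deployed RTDIP pipeline job."""
        deploy_client = DatabricksSDKDeploy(databricks_job=job, host=host, token=token)
        # stop() looks the job up by name and calls cancel_all_runs on it;
        # it raises ValueError if no matching job exists in the workspace.
        return deploy_client.stop()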