Merged
Changes from all 21 commits
97b92e1
Added Databricks SQL Warehouses API actions
Lokeshchand33 Aug 22, 2025
5a697bd
Update Databricks SQL Warehouse docs URLs
Lokeshchand33 Aug 22, 2025
6623be2
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Aug 23, 2025
8343661
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Aug 27, 2025
9292e93
fix(databricks): bump component versions and apply lint fixes
Lokeshchand33 Aug 28, 2025
6a9646c
fix(databricks): addressed requested changes
Lokeshchand33 Aug 29, 2025
d66788b
addressed coderabbit review feedback
Lokeshchand33 Aug 29, 2025
e120588
resolved the linting issues
Lokeshchand33 Aug 29, 2025
5238430
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Aug 30, 2025
e742ec2
addressed all test failures
Lokeshchand33 Sep 1, 2025
01ed509
addressed coderabbit review feedback
Lokeshchand33 Sep 1, 2025
d83d206
Merge branch 'master' into databricks-sql-warehouses
Lokeshchand33 Sep 1, 2025
49e997c
resolved the linting issues
Lokeshchand33 Sep 1, 2025
0535802
addressed coderabbit review feedback
Lokeshchand33 Sep 1, 2025
b98476c
addressed coderabbit review feedback
Lokeshchand33 Sep 1, 2025
2153ac3
resolved the linting issues
Lokeshchand33 Sep 1, 2025
b04a050
updates
michelle0927 Sep 1, 2025
2222816
Add default value for maxNumClusters
vunguyenhung Sep 2, 2025
2aeacf2
create and edit sql warehouses fixes
Lokeshchand33 Sep 2, 2025
9bfe023
create and edit sql warehouse fixes
Lokeshchand33 Sep 2, 2025
99dfc76
updates
michelle0927 Sep 2, 2025
@@ -0,0 +1,171 @@
import databricks from "../../databricks.app.mjs";
import constants from "../../common/constants.mjs";
import utils from "../../common/utils.mjs";
import { ConfigurationError } from "@pipedream/platform";

export default {
key: "databricks-create-sql-warehouse",
name: "Create SQL Warehouse",
description: "Creates a new SQL Warehouse in Databricks. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/create)",
version: "0.0.1",
type: "action",
props: {
databricks,
name: {
type: "string",
label: "Warehouse Name",
description: "A human-readable name for the warehouse",
},
clusterSize: {
type: "string",
label: "Cluster Size",
description: "Size of the cluster",
options: constants.CLUSTER_SIZES,
},
autoStopMinutes: {
type: "integer",
label: "Auto Stop (minutes)",
description:
"Minutes of inactivity before auto-stop. 0 disables auto-stop. Must be 0 or ≥ 10.",
optional: true,
default: 10,
},
minNumClusters: {
type: "integer",
label: "Min Number of Clusters",
description: "Minimum number of clusters to maintain (> 0 and ≤ min(max_num_clusters, 30)).",
optional: true,
default: 1,
},
maxNumClusters: {
type: "integer",
label: "Max Number of Clusters",
description: "Maximum number of clusters for autoscaler (≥ min_num_clusters and ≤ 30).",
optional: true,
default: 1,
},
enablePhoton: {
type: "boolean",
label: "Enable Photon",
description: "Whether the warehouse should use Photon optimized clusters.",
optional: true,
},
enableServerlessCompute: {
type: "boolean",
label: "Enable Serverless Compute",
description: "Whether the warehouse should use serverless compute.",
optional: true,
},
warehouseType: {
type: "string",
label: "Warehouse Type",
description:
"Warehouse type: PRO or CLASSIC. Set PRO + enableServerlessCompute = true to use serverless.",
options: [
"TYPE_UNSPECIFIED",
"CLASSIC",
"PRO",
],
optional: true,
},
spotInstancePolicy: {
type: "string",
label: "Spot Instance Policy",
description: "Configures whether the warehouse should use spot instances.",
options: [
"POLICY_UNSPECIFIED",
"COST_OPTIMIZED",
"RELIABILITY_OPTIMIZED",
],
optional: true,
},
channel: {
type: "object",
label: "Channel",
description:
"Channel details. Example: `{ \"name\": \"CHANNEL_NAME_CUSTOM\", \"dbsql_version\": \"2023.35\" }`",
optional: true,
},
tags: {
type: "object",
label: "Tags",
description:
"Custom key-value tags for resources associated with this SQL Warehouse.",
optional: true,
},
instanceProfileArn: {
type: "string",
label: "Instance Profile ARN (Deprecated)",
description: "Deprecated. Instance profile used to pass IAM role to the cluster.",
optional: true,
},
},

async run({ $ }) {
const payload = {
name: this.name,
cluster_size: this.clusterSize,
};

if (this.autoStopMinutes !== undefined) {
if (this.autoStopMinutes !== 0 && this.autoStopMinutes < 10) {
throw new ConfigurationError("autoStopMinutes must be 0 or ≥ 10.");
}
payload.auto_stop_mins = this.autoStopMinutes;
}

const minNumClusters = this.minNumClusters ?? 1;
if (minNumClusters < 1 || minNumClusters > 30) {
throw new ConfigurationError("minNumClusters must be between 1 and 30.");
}
payload.min_num_clusters = minNumClusters;

if (this.maxNumClusters !== undefined) {
if (
this.maxNumClusters < payload.min_num_clusters ||
this.maxNumClusters > 30
) {
throw new ConfigurationError(
`maxNumClusters must be ≥ minNumClusters (${payload.min_num_clusters}) and ≤ 30.`,
);
}
payload.max_num_clusters = this.maxNumClusters;
}

const parsedTags = utils.parseObject(this.tags);
const tagArray = Object.entries(parsedTags).map(([
key,
value,
]) => ({
key,
value,
}));
if (tagArray.length) {
payload.tags = {
custom_tags: tagArray,
};
}

if (this.enablePhoton !== undefined)
payload.enable_photon = this.enablePhoton;
if (this.enableServerlessCompute !== undefined)
payload.enable_serverless_compute = this.enableServerlessCompute;
if (this.warehouseType) payload.warehouse_type = this.warehouseType;
if (this.spotInstancePolicy)
payload.spot_instance_policy = this.spotInstancePolicy;
if (this.channel) payload.channel = utils.parseObject(this.channel);
if (this.instanceProfileArn)
payload.instance_profile_arn = this.instanceProfileArn;

const response = await this.databricks.createSQLWarehouse({
data: payload,
$,
});

$.export(
"$summary",
`Successfully created SQL Warehouse: ${response?.name || this.name}`,
);
return response;
},
};
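
For context, the sketch below shows the request body that run() assembles for the warehouses/create endpoint when only the required props and the documented defaults are supplied. The warehouse name and cluster size values are hypothetical and assume "Small" is one of the options in constants.CLUSTER_SIZES.

// Hypothetical payload shape produced by run() above with default prop values.
// Only `name` and `cluster_size` are required; the remaining keys come from
// the defaults of autoStopMinutes, minNumClusters, and maxNumClusters.
const examplePayload = {
  name: "reporting-wh",
  cluster_size: "Small",
  auto_stop_mins: 10,
  min_num_clusters: 1,
  max_num_clusters: 1,
};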
@@ -0,0 +1,30 @@
import databricks from "../../databricks.app.mjs";

export default {
key: "databricks-delete-sql-warehouse",
name: "Delete SQL Warehouse",
description: "Deletes a SQL Warehouse by ID. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/delete)",
version: "0.0.1",
type: "action",
props: {
databricks,
warehouseId: {
description: "The ID of the SQL Warehouse to delete",
propDefinition: [
databricks,
"warehouseId",
],
},
},
async run({ $ }) {
await this.databricks.deleteSQLWarehouse({
warehouseId: this.warehouseId,
$,
});

$.export("$summary", `Successfully deleted SQL Warehouse with ID ${this.warehouseId}`);
return {
success: true,
};
},
};
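
For reference, a hedged sketch of the REST call that deleteSQLWarehouse() is presumed to delegate to, based on the linked Warehouses API documentation; the helper itself lives in databricks.app.mjs and is not part of this diff. The host, token, and warehouse ID below are placeholders.

// Assumed underlying request (per the linked docs): DELETE /api/2.0/sql/warehouses/{id}.
// All values here are placeholders, not taken from this PR.
const host = "https://my-workspace.cloud.databricks.com";
const token = process.env.DATABRICKS_TOKEN;
const warehouseId = "1234567890abcdef";

await fetch(`${host}/api/2.0/sql/warehouses/${warehouseId}`, {
  method: "DELETE",
  headers: {
    Authorization: `Bearer ${token}`,
  },
});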
@@ -0,0 +1,183 @@
import databricks from "../../databricks.app.mjs";
import constants from "../../common/constants.mjs";
import utils from "../../common/utils.mjs";
import { ConfigurationError } from "@pipedream/platform";

export default {
key: "databricks-edit-sql-warehouse",
name: "Edit SQL Warehouse",
description: "Edits the configuration of an existing SQL Warehouse. [See the documentation](https://docs.databricks.com/api/workspace/warehouses/edit)",
version: "0.0.1",
type: "action",
props: {
databricks,
warehouseId: {
description: "The ID of the SQL Warehouse to edit",
propDefinition: [
databricks,
"warehouseId",
],
},
name: {
type: "string",
label: "Warehouse Name",
description: "Logical name for the warehouse. Must be unique within an org and under 100 characters.",
optional: true,
},
clusterSize: {
type: "string",
label: "Cluster Size",
description: "Size of clusters allocated for this warehouse.",
options: constants.CLUSTER_SIZES,
optional: true,
},
autoStopMins: {
type: "integer",
label: "Auto Stop (minutes)",
description: "Minutes of inactivity before auto-stop. 0 disables autostop. Must be 0 or ≥ 10.",
optional: true,
},
minNumClusters: {
type: "integer",
label: "Min Number of Clusters",
description: "Minimum number of available clusters (> 0 and ≤ min(max_num_clusters, 30)).",
optional: true,
},
maxNumClusters: {
type: "integer",
label: "Max Number of Clusters",
description: "Maximum number of clusters for autoscaler (≥ min_num_clusters and ≤ 30).",
optional: true,
},
enablePhoton: {
type: "boolean",
label: "Enable Photon",
description: "Use Photon optimized clusters.",
optional: true,
},
enableServerlessCompute: {
type: "boolean",
label: "Enable Serverless Compute",
description: "Use serverless compute for this warehouse.",
optional: true,
},
warehouseType: {
type: "string",
label: "Warehouse Type",
description: "Set to PRO (recommended) or CLASSIC. Set PRO + enable serverless to use serverless.",
options: [
"TYPE_UNSPECIFIED",
"CLASSIC",
"PRO",
],
optional: true,
},
spotInstancePolicy: {
type: "string",
label: "Spot Instance Policy",
description: "Whether the warehouse should use spot instances.",
options: [
"POLICY_UNSPECIFIED",
"COST_OPTIMIZED",
"RELIABILITY_OPTIMIZED",
],
optional: true,
},
tags: {
type: "object",
label: "Tags",
description: "Key-value tags for all resources associated with this warehouse (fewer than 45 tags).",
optional: true,
},
channel: {
type: "object",
label: "Channel",
description: "Channel details. Example: `{ \"name\": \"CHANNEL_NAME_CUSTOM\", \"dbsql_version\": \"2023.35\" }`",
optional: true,
},
instanceProfileArn: {
type: "string",
label: "Instance Profile ARN (Deprecated)",
description: "Deprecated. Instance profile used to pass IAM role to the cluster.",
optional: true,
},
},
async run({ $ }) {
const payload = {};

if (this.name !== undefined) {
if (typeof this.name !== "string" || this.name.length >= 100) {
throw new ConfigurationError("name must be a string with fewer than 100 characters.");
}
payload.name = this.name;
}
if (this.clusterSize !== undefined) payload.cluster_size = this.clusterSize;

if (this.autoStopMins !== undefined) {
if (this.autoStopMins !== 0 && this.autoStopMins < 10) {
throw new ConfigurationError("autoStopMins must be 0 or >= 10.");
}
payload.auto_stop_mins = this.autoStopMins;
}

if (this.minNumClusters !== undefined) {
if (this.minNumClusters < 1 || this.minNumClusters > 30) {
throw new ConfigurationError("minNumClusters must be between 1 and 30.");
}
payload.min_num_clusters = this.minNumClusters;
}

if (this.maxNumClusters !== undefined) {
if (this.maxNumClusters < 1 || this.maxNumClusters > 30) {
throw new ConfigurationError("maxNumClusters must be between 1 and 30.");
}
if (this.minNumClusters !== undefined && this.maxNumClusters < this.minNumClusters) {
throw new ConfigurationError("maxNumClusters must be >= minNumClusters.");
}
payload.max_num_clusters = this.maxNumClusters;
}

if (this.enablePhoton !== undefined) payload.enable_photon = this.enablePhoton;
if (this.enableServerlessCompute !== undefined) {
if (this.warehouseType === "CLASSIC" && this.enableServerlessCompute) {
throw new ConfigurationError("Serverless compute requires warehouseType = PRO.");
}
payload.enable_serverless_compute = this.enableServerlessCompute;
}

const parsedTags = utils.parseObject(this.tags);
const tagArray = Object.entries(parsedTags).map(([
key,
value,
]) => ({
key,
value,
}));
if (tagArray.length) {
payload.tags = {
custom_tags: tagArray,
};
}
if (this.warehouseType !== undefined) payload.warehouse_type = this.warehouseType;
if (this.spotInstancePolicy !== undefined) {
payload.spot_instance_policy = this.spotInstancePolicy;
}
if (this.channel !== undefined) payload.channel = utils.parseObject(this.channel);
if (this.instanceProfileArn !== undefined) {
payload.instance_profile_arn = this.instanceProfileArn;
}

if (!Object.keys(payload).length) {
throw new ConfigurationError("No fields to update. Provide at least one property.");
}

const response = await this.databricks.editSQLWarehouse({
warehouseId: this.warehouseId,
data: payload,
$,
});

$.export("$summary", `Successfully edited SQL Warehouse ID ${this.warehouseId}`);
return response;
},
};
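
Both the create and edit actions reshape the tags object prop into the custom_tags array format before sending it. Below is a small standalone illustration of that mapping, mirroring the Object.entries() call in run() above; the tag keys and values are made up.

// Mirrors the tag mapping in run(); input keys and values are hypothetical.
const tags = {
  team: "analytics",
  env: "prod",
};
const customTags = Object.entries(tags).map(([ key, value ]) => ({
  key,
  value,
}));
// customTags => [ { key: "team", value: "analytics" }, { key: "env", value: "prod" } ]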
@@ -4,7 +4,7 @@ export default {
key: "databricks-get-run-output",
name: "Get Run Output",
description: "Retrieve the output and metadata of a single task run. [See the documentation](https://docs.databricks.com/en/workflows/jobs/jobs-2.0-api.html#runs-get-output)",
version: "0.0.1",
version: "0.0.2",
type: "action",
props: {
databricks,