diff --git a/apps/backend/prisma/migrations/20250903080405_workflows/migration.sql b/apps/backend/prisma/migrations/20250903080405_workflows/migration.sql new file mode 100644 index 0000000000..add84a7b9c --- /dev/null +++ b/apps/backend/prisma/migrations/20250903080405_workflows/migration.sql @@ -0,0 +1,81 @@ +-- CreateTable +CREATE TABLE "WorkflowTriggerToken" ( + "tenancyId" UUID NOT NULL, + "id" UUID NOT NULL, + "tokenHash" TEXT NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + "expiresAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "WorkflowTriggerToken_pkey" PRIMARY KEY ("tenancyId","id") +); + +-- CreateTable +CREATE TABLE "WorkflowTrigger" ( + "tenancyId" UUID NOT NULL, + "id" UUID NOT NULL, + "executionId" UUID NOT NULL, + "triggerData" JSONB NOT NULL, + "scheduledAt" TIMESTAMP(3), + "output" JSONB, + "error" JSONB, + "compiledWorkflowId" UUID, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "WorkflowTrigger_pkey" PRIMARY KEY ("tenancyId","id") +); + +-- CreateTable +CREATE TABLE "WorkflowExecution" ( + "tenancyId" UUID NOT NULL, + "id" UUID NOT NULL, + "workflowId" TEXT NOT NULL, + "triggerIds" TEXT[], + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "WorkflowExecution_pkey" PRIMARY KEY ("tenancyId","id") +); + +-- CreateTable +CREATE TABLE "CurrentlyCompilingWorkflow" ( + "tenancyId" UUID NOT NULL, + "workflowId" TEXT NOT NULL, + "compilationVersion" INTEGER NOT NULL, + "sourceHash" TEXT NOT NULL, + "startedCompilingAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "CurrentlyCompilingWorkflow_pkey" PRIMARY KEY ("tenancyId","workflowId","compilationVersion","sourceHash") +); + +-- CreateTable +CREATE TABLE "CompiledWorkflow" ( + "tenancyId" UUID NOT NULL, + "id" UUID NOT NULL, + "workflowId" TEXT NOT NULL, + "compilationVersion" INTEGER NOT NULL, + "sourceHash" TEXT NOT NULL, + "compiledCode" TEXT, + "compileError" TEXT, + "compiledAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "registeredTriggers" TEXT[], + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "updatedAt" TIMESTAMP(3) NOT NULL, + + CONSTRAINT "CompiledWorkflow_pkey" PRIMARY KEY ("tenancyId","id") +); + +-- CreateIndex +CREATE UNIQUE INDEX "WorkflowTriggerToken_tenancyId_tokenHash_key" ON "WorkflowTriggerToken"("tenancyId", "tokenHash"); + +-- CreateIndex +CREATE UNIQUE INDEX "CompiledWorkflow_tenancyId_workflowId_compilationVersion_so_key" ON "CompiledWorkflow"("tenancyId", "workflowId", "compilationVersion", "sourceHash"); + +-- AddForeignKey +ALTER TABLE "WorkflowTrigger" ADD CONSTRAINT "WorkflowTrigger_tenancyId_compiledWorkflowId_fkey" FOREIGN KEY ("tenancyId", "compiledWorkflowId") REFERENCES "CompiledWorkflow"("tenancyId", "id") ON DELETE RESTRICT ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "WorkflowTrigger" ADD CONSTRAINT "WorkflowTrigger_tenancyId_executionId_fkey" FOREIGN KEY ("tenancyId", "executionId") REFERENCES "WorkflowExecution"("tenancyId", "id") ON DELETE RESTRICT ON UPDATE CASCADE; diff --git a/apps/backend/prisma/schema.prisma b/apps/backend/prisma/schema.prisma index b07257fcad..374da8a1cf 100644 --- a/apps/backend/prisma/schema.prisma +++ b/apps/backend/prisma/schema.prisma @@ -24,7 +24,7 @@ model Project { fullLogoUrl String? projectConfigOverride Json? 
- stripeAccountId String? + stripeAccountId String? apiKeySets ApiKeySet[] projectUsers ProjectUser[] @@ -773,15 +773,109 @@ model ItemQuantityChange { } model DataVaultEntry { - id String @default(uuid()) @db.Uuid - tenancyId String @db.Uuid - storeId String - hashedKey String - encrypted Json // Contains { edkBase64, ciphertextBase64 } from encryptWithKms() - createdAt DateTime @default(now()) - updatedAt DateTime @updatedAt + id String @default(uuid()) @db.Uuid + tenancyId String @db.Uuid + storeId String + hashedKey String + encrypted Json // Contains { edkBase64, ciphertextBase64 } from encryptWithKms() + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt @@id([tenancyId, id]) @@unique([tenancyId, storeId, hashedKey]) @@index([tenancyId, storeId]) } + +model WorkflowTriggerToken { + tenancyId String @db.Uuid + id String @default(uuid()) @db.Uuid + + tokenHash String + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + expiresAt DateTime + + @@id([tenancyId, id]) + @@unique([tenancyId, tokenHash]) +} + +model WorkflowTrigger { + tenancyId String @db.Uuid + id String @default(uuid()) @db.Uuid + executionId String @db.Uuid + + triggerData Json + + // the following fields determine the state of the trigger: + // - scheduledAt && !compiledWorkflowId && !output && !error: the trigger is scheduled to be executed + // - !scheduledAt && compiledWorkflowId && !output && !error: the trigger is currently executing + // - !scheduledAt && compiledWorkflowId && output && !error: the trigger has successfully completed execution + // - !scheduledAt && compiledWorkflowId && !output && error: the trigger has failed execution + // All other combinations are invalid. + scheduledAt DateTime? + output Json? + error Json? + compiledWorkflowId String? @db.Uuid + compiledWorkflow CompiledWorkflow? @relation(fields: [tenancyId, compiledWorkflowId], references: [tenancyId, id]) + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + execution WorkflowExecution @relation(fields: [tenancyId, executionId], references: [tenancyId, id]) + + @@id([tenancyId, id]) +} + +model WorkflowExecution { + tenancyId String @db.Uuid + id String @default(uuid()) @db.Uuid + + workflowId String + + triggerIds String[] + triggers WorkflowTrigger[] + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@id([tenancyId, id]) +} + +model CurrentlyCompilingWorkflow { + tenancyId String @db.Uuid + workflowId String + compilationVersion Int + sourceHash String + + startedCompilingAt DateTime @default(now()) + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@id([tenancyId, workflowId, compilationVersion, sourceHash]) +} + +model CompiledWorkflow { + tenancyId String @db.Uuid + id String @default(uuid()) @db.Uuid + workflowId String // note: The workflow with this ID may have been edited or deleted in the meantime, so there may be multiple CompiledWorkflows with the same workflowId + compilationVersion Int + sourceHash String + + // exactly one of [compiledCode, compileError] must be set + compiledCode String? + compileError String? 
+ + compiledAt DateTime @default(now()) + registeredTriggers String[] + + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + workflowTriggers WorkflowTrigger[] + + @@id([tenancyId, id]) + @@unique([tenancyId, workflowId, compilationVersion, sourceHash]) +} diff --git a/apps/backend/src/app/api/latest/users/crud.tsx b/apps/backend/src/app/api/latest/users/crud.tsx index 9a26dbb085..d7ebfeebf4 100644 --- a/apps/backend/src/app/api/latest/users/crud.tsx +++ b/apps/backend/src/app/api/latest/users/crud.tsx @@ -5,6 +5,7 @@ import { ensureTeamMembershipExists, ensureUserExists } from "@/lib/request-chec import { Tenancy, getSoleTenancyFromProjectBranch, getTenancy } from "@/lib/tenancies"; import { PrismaTransaction } from "@/lib/types"; import { sendTeamMembershipDeletedWebhook, sendUserCreatedWebhook, sendUserDeletedWebhook, sendUserUpdatedWebhook } from "@/lib/webhooks"; +import { triggerWorkflows } from "@/lib/workflows"; import { RawQuery, getPrismaClientForSourceOfTruth, getPrismaClientForTenancy, getPrismaSchemaForSourceOfTruth, getPrismaSchemaForTenancy, globalPrismaClient, rawQuery, retryTransaction, sqlQuoteIdent } from "@/prisma-client"; import { createCrudHandlers } from "@/route-handlers/crud-handler"; import { uploadAndGetUrl } from "@/s3"; @@ -648,6 +649,14 @@ export const usersCrudHandlers = createLazyProxy(() => createCrudHandlers(usersC await createPersonalTeamIfEnabled(prisma, auth.tenancy, result); + // if the user is not an anonymous user, trigger onSignUp workflows + if (!result.is_anonymous) { + await triggerWorkflows(auth.tenancy, { + type: "sign-up", + userId: result.id, + }); + } + runAsynchronouslyAndWaitUntil(sendUserCreatedWebhook({ projectId: auth.project.id, data: result, @@ -948,8 +957,15 @@ export const usersCrudHandlers = createLazyProxy(() => createCrudHandlers(usersC } } - // if we went from anonymous to non-anonymous, rename the personal team + // if we went from anonymous to non-anonymous: if (oldUser.isAnonymous && data.is_anonymous === false) { + // trigger onSignUp workflows + await triggerWorkflows(auth.tenancy, { + type: "sign-up", + userId: params.user_id, + }); + + // rename the personal team await tx.team.updateMany({ where: { tenancyId: auth.tenancy.id, diff --git a/apps/backend/src/lib/email-rendering.tsx b/apps/backend/src/lib/email-rendering.tsx index b58622e0c1..9a49ec960a 100644 --- a/apps/backend/src/lib/email-rendering.tsx +++ b/apps/backend/src/lib/email-rendering.tsx @@ -1,6 +1,5 @@ import { Freestyle } from '@/lib/freestyle'; import { emptyEmailTheme } from '@stackframe/stack-shared/dist/helpers/emails'; -import { getEnvVariable, getNodeEnvironment } from '@stackframe/stack-shared/dist/utils/env'; import { StackAssertionError } from '@stackframe/stack-shared/dist/utils/errors'; import { bundleJavaScript } from '@stackframe/stack-shared/dist/utils/esbuild'; import { get, has } from '@stackframe/stack-shared/dist/utils/objects'; @@ -53,7 +52,6 @@ export async function renderEmailWithTemplate( previewMode?: boolean, }, ): Promise> { - const apiKey = getEnvVariable("STACK_FREESTYLE_API_KEY"); const variables = options.variables ?? {}; const previewMode = options.previewMode ?? false; const user = (previewMode && !options.user) ? 
{ displayName: "John Doe" } : options.user; @@ -114,17 +112,17 @@ export async function renderEmailWithTemplate( return Result.error(result.error); } - const freestyle = new Freestyle({ apiKey }); + const freestyle = new Freestyle(); const nodeModules = { "react": "19.1.1", "@react-email/components": "0.1.1", "arktype": "2.1.20", }; const output = await freestyle.executeScript(result.data, { nodeModules }); - if ("error" in output) { - return Result.error(output.error as string); + if (output.status === "error") { + return Result.error(`${output.error}`); } - return Result.ok(output.result as { html: string, text: string, subject: string, notificationCategory: string }); + return Result.ok(output.data.result as { html: string, text: string, subject: string, notificationCategory: string }); } diff --git a/apps/backend/src/lib/freestyle.tsx b/apps/backend/src/lib/freestyle.tsx index b6b0ea6ceb..d038973c8f 100644 --- a/apps/backend/src/lib/freestyle.tsx +++ b/apps/backend/src/lib/freestyle.tsx @@ -1,18 +1,24 @@ import { traceSpan } from '@/utils/telemetry'; -import { getNodeEnvironment } from '@stackframe/stack-shared/dist/utils/env'; -import { StackAssertionError, captureError, errorToNiceString } from '@stackframe/stack-shared/dist/utils/errors'; +import { getEnvVariable, getNodeEnvironment } from '@stackframe/stack-shared/dist/utils/env'; +import { StackAssertionError } from '@stackframe/stack-shared/dist/utils/errors'; +import { parseJson } from '@stackframe/stack-shared/dist/utils/json'; +import { Result } from '@stackframe/stack-shared/dist/utils/results'; import { FreestyleSandboxes } from 'freestyle-sandboxes'; export class Freestyle { private freestyle: FreestyleSandboxes; - constructor(options: { apiKey: string }) { + constructor(options: { apiKey?: string } = {}) { + const apiKey = options.apiKey || getEnvVariable("STACK_FREESTYLE_API_KEY"); let baseUrl = undefined; - if (["development", "test"].includes(getNodeEnvironment()) && options.apiKey === "mock_stack_freestyle_key") { + if (apiKey === "mock_stack_freestyle_key") { + if (!["development", "test"].includes(getNodeEnvironment())) { + throw new StackAssertionError("Mock Freestyle key used in production; please set the STACK_FREESTYLE_API_KEY environment variable."); + } baseUrl = "http://localhost:8122"; } this.freestyle = new FreestyleSandboxes({ - apiKey: options.apiKey, + apiKey, baseUrl, }); } @@ -27,10 +33,15 @@ export class Freestyle { } }, async () => { try { - return await this.freestyle.executeScript(script, options); - } catch (error) { - captureError("freestyle.executeScript", error); - throw new StackAssertionError("Error executing script with Freestyle! " + errorToNiceString(error), { cause: error }); + const res = await this.freestyle.executeScript(script, options); + return Result.ok(res); + } catch (e: unknown) { + // for whatever reason, Freestyle's errors are sometimes returned in JSON.parse(e.error.error).error (lol) + const wrap1 = e && typeof e === "object" && "error" in e ? e.error : e; + const wrap2 = wrap1 && typeof wrap1 === "object" && "error" in wrap1 ? wrap1.error : wrap1; + const wrap3 = wrap2 && typeof wrap2 === "string" ? Result.or(parseJson(wrap2), wrap2) : wrap2; + const wrap4 = wrap3 && typeof wrap3 === "object" && "error" in wrap3 ? 
wrap3.error : wrap3; + return Result.error(`${wrap4}`); } }); } diff --git a/apps/backend/src/lib/workflows.tsx b/apps/backend/src/lib/workflows.tsx new file mode 100644 index 0000000000..de084ef89e --- /dev/null +++ b/apps/backend/src/lib/workflows.tsx @@ -0,0 +1,515 @@ +import { getPrismaClientForTenancy, globalPrismaClient, retryTransaction } from "@/prisma-client"; +import { traceSpan } from "@/utils/telemetry"; +import { allPromisesAndWaitUntilEach, runAsynchronouslyAndWaitUntil } from "@/utils/vercel"; +import { CompiledWorkflow, Prisma } from "@prisma/client"; +import { isStringArray } from "@stackframe/stack-shared/dist/utils/arrays"; +import { encodeBase64 } from "@stackframe/stack-shared/dist/utils/bytes"; +import { generateSecureRandomString, hash } from "@stackframe/stack-shared/dist/utils/crypto"; +import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env"; +import { StackAssertionError, captureError, errorToNiceString, throwErr } from "@stackframe/stack-shared/dist/utils/errors"; +import { bundleJavaScript } from "@stackframe/stack-shared/dist/utils/esbuild"; +import { runAsynchronously, timeout, wait } from "@stackframe/stack-shared/dist/utils/promises"; +import { Result } from "@stackframe/stack-shared/dist/utils/results"; +import { generateUuid } from "@stackframe/stack-shared/dist/utils/uuids"; +import { Freestyle } from "./freestyle"; +import { Tenancy } from "./tenancies"; + +const externalPackages = { + '@stackframe/stack': 'latest', +}; + +type WorkflowRegisteredTriggerType = "sign-up"; + +type WorkflowTrigger = + | { + type: "sign-up", + userId: string, + } + | { + type: "compile", + } + | { + type: "callback", + callbackId: string, + scheduledAtMillis: number, + data: unknown, + callerTriggerId: string, + executionId: string, + }; + +async function hashWorkflowSource(source: string) { + return encodeBase64(await hash({ + purpose: "stack-auth-workflow-source", + value: JSON.stringify(source), + })); +} + +export async function hashWorkflowTriggerToken(token: string) { + return encodeBase64(await hash({ + purpose: "stack-auth-workflow-trigger-token", + value: token, + })); +} + +export async function compileWorkflowSource(source: string): Promise> { + const bundleResult = await bundleJavaScript({ + "/source.tsx": source, + "/entry.js": ` + import { StackServerApp } from '@stackframe/stack'; + + export default async () => { + globalThis.stackApp = new StackServerApp({ + tokenStore: null, + extraRequestHeaders: { + "x-stack-workflow-token": process.env.STACK_WORKFLOW_TOKEN_SECRET, + } + }); + + const registeredTriggers = new Map(); + globalThis._registerTrigger = (triggerType, func) => { + registeredTriggers.set(triggerType, func); + }; + _registerTrigger("compile", () => ({ + registeredTriggers: [...registeredTriggers.keys()], + })); + + const registeredCallbacks = new Map(); + globalThis.registerCallback = (callbackId, func) => { + registeredCallbacks.set(callbackId, func); + }; + _registerTrigger("callback", ({ callbackId, data }) => { + const callbackFunc = registeredCallbacks.get(callbackId); + if (!callbackFunc) { + throw new Error(\`Callback \${callbackId} not found. 
Was it maybe deleted from the workflow?\`); + } + return callbackFunc(JSON.parse(data.dataJson)); + }); + let scheduledCallback = undefined; + globalThis.scheduleCallback = ({ callbackId, data, scheduleAt }) => { + if (scheduledCallback) { + throw new Error("Only one callback can be scheduled at a time!"); + } + scheduledCallback = { callbackId, data, scheduleAtMillis: scheduleAt.getTime() }; + return scheduledCallback; + }; + + function makeTriggerRegisterer(str, typeCb, argsCb) { + globalThis[str] = (...args) => _registerTrigger(typeCb(...args.slice(0, -1)), async (data) => args[args.length - 1](...await argsCb(data))); + } + + makeTriggerRegisterer("onSignUp", () => "sign-up", async (data) => [await stackApp.getUser(data.userId, { or: "throw" })]); + + await import("./source.tsx"); + + const triggerData = JSON.parse(process.env.STACK_WORKFLOW_TRIGGER_DATA); + const trigger = registeredTriggers.get(triggerData.type); + if (!trigger) { + throw new Error(\`Workflow trigger \${triggerData.type} invoked but not found. Please report this to the developers.\`); + } + const triggerOutput = await trigger(triggerData); + if (scheduledCallback !== undefined) { + if (triggerOutput !== scheduledCallback) { + throw new Error("When calling scheduleCallback, you must return its return value in the event handler!"); + } + return { + scheduledCallback: triggerOutput, + }; + } else { + return { + triggerOutput, + }; + } + } + `, + }, { + format: 'esm', + keepAsImports: Object.keys(externalPackages), + }); + if (bundleResult.status === "error") { + return Result.error(bundleResult.error); + } + return Result.ok(bundleResult.data); +} + +async function compileWorkflow(tenancy: Tenancy, workflowId: string): Promise> { + return await traceSpan(`compileWorkflow ${workflowId}`, async () => { + if (!(workflowId in tenancy.config.workflows.availableWorkflows)) { + throw new StackAssertionError(`Workflow ${workflowId} not found`); + } + const workflow = tenancy.config.workflows.availableWorkflows[workflowId]; + const res = await timeout(async () => { + const compiledCodeResult = await compileWorkflowSource(workflow.tsSource); + if (compiledCodeResult.status === "error") { + return Result.error({ compileError: `Failed to compile workflow: ${compiledCodeResult.error}` }); + } + + const compileTriggerResult = await triggerWorkflowRaw(tenancy, compiledCodeResult.data, { + type: "compile", + }); + if (compileTriggerResult.status === "error") { + return Result.error({ compileError: `Failed to initialize workflow: ${compileTriggerResult.error}` }); + } + const compileTriggerOutputResult = compileTriggerResult.data; + if (typeof compileTriggerOutputResult !== "object" || !compileTriggerOutputResult || !("triggerOutput" in compileTriggerOutputResult)) { + captureError("workflows-compile-trigger-output", new StackAssertionError(`Failed to parse compile trigger output`, { compileTriggerOutputResult })); + return Result.error({ compileError: `Failed to parse compile trigger output` }); + } + const registeredTriggers = (compileTriggerOutputResult.triggerOutput as any)?.registeredTriggers; + if (!isStringArray(registeredTriggers)) { + captureError("workflows-compile-trigger-output", new StackAssertionError(`Failed to parse compile trigger output, should be array of strings`, { compileTriggerOutputResult })); + return Result.error({ compileError: `Failed to parse compile trigger output, should be array of strings` }); + } + + return Result.ok({ + compiledCode: compiledCodeResult.data, + registeredTriggers: registeredTriggers, + 
}); + }, 10_000); + + if (res.status === "error") { + return Result.error({ compileError: `Timed out compiling workflow ${workflowId} after ${res.error.ms}ms` }); + } + return res.data; + }); +} + +import.meta.vitest?.test("compileWorkflow", async ({ expect }) => { + const compileAndGetResult = async (tsSource: string) => { + const tenancy = { + id: "01234567-89ab-cdef-0123-456789abcdef", + project: { + id: "test-project", + }, + config: { + workflows: { + availableWorkflows: { + "test-workflow": { + enabled: true, + tsSource, + }, + }, + }, + }, + }; + + return await compileWorkflow(tenancy as any, "test-workflow"); + }; + const compileAndGetRegisteredTriggers = async (tsSource: string) => { + const res = await compileAndGetResult(tsSource); + if (res.status === "error") throw new StackAssertionError(`Failed to compile workflow: ${errorToNiceString(res.error)}`, { cause: res.error }); + return res.data.registeredTriggers; + }; + + expect(await compileAndGetRegisteredTriggers("console.log('hello, world!');")).toEqual([ + "compile", + "callback", + ]); + expect(await compileAndGetRegisteredTriggers("onSignUp(() => {}); registerCallback('test', () => {});")).toEqual([ + "compile", + "callback", + "sign-up", + ]); + expect(await compileAndGetResult("return return return return;")).toMatchInlineSnapshot(` + { + "error": { + "compileError": "Failed to compile workflow: Build failed with 1 error: + virtual:/source.tsx:1:7: ERROR: Unexpected "return"", + }, + "status": "error", + } + `); + expect(await compileAndGetResult("console.log('hello, world!'); throw new Error('test');")).toMatchInlineSnapshot(` + { + "error": { + "compileError": "Failed to initialize workflow: test", + }, + "status": "error", + } + `); +}); + +async function compileAndGetEnabledWorkflows(tenancy: Tenancy): Promise> { + const compilationVersion = 1; + const enabledWorkflows = new Map(await Promise.all(Object.entries(tenancy.config.workflows.availableWorkflows) + .filter(([_, workflow]) => workflow.enabled) + .map(async ([workflowId, workflow]) => [workflowId, { + id: workflowId, + workflow, + sourceHash: await hashWorkflowSource(workflow.tsSource), + }] as const))); + + const getWorkflowsToCompile = async (tx: Prisma.TransactionClient) => { + const compiledWorkflows = await tx.compiledWorkflow.findMany({ + where: { + tenancyId: tenancy.id, + workflowId: { in: [...enabledWorkflows.keys()] }, + compilationVersion, + sourceHash: { in: [...enabledWorkflows.values()].map(({ sourceHash }) => sourceHash) }, + }, + }); + + const found = new Map(); + const missing = new Set(enabledWorkflows.keys()); + for (const compiledWorkflow of compiledWorkflows) { + const enabledWorkflow = enabledWorkflows.get(compiledWorkflow.workflowId) ?? throwErr(`Compiled workflow ${compiledWorkflow.workflowId} not found in enabled workflows — this should not happen due to our Prisma filter!`); + if (enabledWorkflow.sourceHash === compiledWorkflow.sourceHash) { + found.set(compiledWorkflow.workflowId, compiledWorkflow); + missing.delete(compiledWorkflow.workflowId); + } + } + + const toCompile: string[] = []; + const waiting: string[] = []; + for (const workflowId of missing) { + const enabledWorkflow = enabledWorkflows.get(workflowId) ?? 
throwErr(`Enabled workflow ${workflowId} not found in enabled workflows — this should not happen due to our Prisma filter!`); + const currentlyCompiling = await tx.currentlyCompilingWorkflow.findUnique({ + where: { + tenancyId_workflowId_compilationVersion_sourceHash: { + tenancyId: tenancy.id, + workflowId, + compilationVersion, + sourceHash: enabledWorkflow.sourceHash, + }, + }, + }); + if (currentlyCompiling) { + waiting.push(workflowId); + } else { + toCompile.push(workflowId); + } + } + + if (toCompile.length > 0) { + await tx.currentlyCompilingWorkflow.createMany({ + data: toCompile.map((workflowId) => ({ + tenancyId: tenancy.id, + compilationVersion, + workflowId, + sourceHash: enabledWorkflows.get(workflowId)?.sourceHash ?? throwErr(`Enabled workflow ${workflowId} not found in enabled workflows — this should not happen due to our Prisma filter!`), + })), + }); + } + + return { + toCompile, + waiting, + workflows: found, + }; + }; + + let retryInfo = []; + const prisma = await getPrismaClientForTenancy(tenancy); + for (let retries = 0; retries < 10; retries++) { + const todo = await retryTransaction(prisma, async (tx) => { + return await getWorkflowsToCompile(tx); + }, { level: "serializable" }); + + retryInfo.push({ + toCompile: todo.toCompile, + waiting: todo.waiting, + done: [...todo.workflows.entries()].map(([workflowId, workflow]) => workflowId), + }); + + if (todo.toCompile.length === 0 && todo.waiting.length === 0) { + return todo.workflows; + } + + await allPromisesAndWaitUntilEach(todo.toCompile.map(async (workflowId) => { + const enabledWorkflow = enabledWorkflows.get(workflowId) ?? throwErr(`Enabled workflow ${workflowId} not found in enabled workflows — this should not happen due to our Prisma filter!`); + try { + const compiledWorkflow = await compileWorkflow(tenancy, workflowId); + await prisma.compiledWorkflow.create({ + data: { + tenancyId: tenancy.id, + compilationVersion, + workflowId, + sourceHash: enabledWorkflow.sourceHash, + ...compiledWorkflow.status === "ok" ? 
{ + compiledCode: compiledWorkflow.data.compiledCode, + registeredTriggers: compiledWorkflow.data.registeredTriggers, + } : { + compileError: compiledWorkflow.error.compileError, + registeredTriggers: [], + }, + }, + }); + console.log(`Compiled workflow ${workflowId}`); + } finally { + await prisma.currentlyCompilingWorkflow.delete({ + where: { + tenancyId_workflowId_compilationVersion_sourceHash: { + tenancyId: tenancy.id, + compilationVersion, + workflowId, + sourceHash: enabledWorkflow.sourceHash, + }, + }, + }); + } + })); + + const { count } = await prisma.currentlyCompilingWorkflow.deleteMany({ + where: { + tenancyId: tenancy.id, + startedCompilingAt: { lt: new Date(Date.now() - 20_000) }, + }, + }); + if (count > 0) { + captureError("workflows-compile-timeout", new StackAssertionError(`Deleted ${count} currently compiling workflows that were compiling for more than 20 seconds; this probably indicates a bug in the workflow compilation code`)); + } + + await wait(1000); + } + + throw new StackAssertionError(`Timed out compiling workflows after retries`, { retryInfo }); +} + +async function triggerWorkflowRaw(tenancy: Tenancy, compiledWorkflowCode: string, trigger: WorkflowTrigger): Promise> { + const workflowToken = generateSecureRandomString(); + const workflowTriggerToken = await globalPrismaClient.workflowTriggerToken.create({ + data: { + expiresAt: new Date(Date.now() + 1000 * 35), + tenancyId: tenancy.id, + tokenHash: await hashWorkflowTriggerToken(workflowToken), + }, + }); + + const tokenRefreshInterval = setInterval(() => { + runAsynchronously(async () => { + await globalPrismaClient.workflowTriggerToken.update({ + where: { + tenancyId_id: { + tenancyId: tenancy.id, + id: workflowTriggerToken.id, + }, + }, + data: { expiresAt: new Date(Date.now() + 1000 * 35) }, + }); + }); + }, 10_000); + + try { + const freestyle = new Freestyle(); + const freestyleRes = await freestyle.executeScript(compiledWorkflowCode, { + envVars: { + STACK_WORKFLOW_TRIGGER_DATA: JSON.stringify(trigger), + NEXT_PUBLIC_STACK_PROJECT_ID: tenancy.project.id, + NEXT_PUBLIC_STACK_API_URL: getEnvVariable("NEXT_PUBLIC_STACK_API_URL").replace("http://localhost", "http://host.docker.internal"), // the replace is a hardcoded hack for the Freestyle mock server + NEXT_PUBLIC_STACK_PUBLISHABLE_CLIENT_KEY: "", + STACK_SECRET_SERVER_KEY: "", + STACK_WORKFLOW_TOKEN_SECRET: workflowToken, + }, + nodeModules: Object.fromEntries(Object.entries(externalPackages).map(([packageName, version]) => [packageName, version])), + }); + return Result.map(freestyleRes, (data) => data.result); + } finally { + clearInterval(tokenRefreshInterval); + } +} + +async function createScheduledTrigger(tenancy: Tenancy, workflowId: string, trigger: WorkflowTrigger, scheduledAt: Date) { + const executionId = trigger.type === "callback" ? 
trigger.executionId : generateUuid(); + + const prisma = await getPrismaClientForTenancy(tenancy); + const dbTrigger = await prisma.workflowTrigger.create({ + data: { + triggerData: trigger as any, + scheduledAt, + execution: { + connectOrCreate: { + where: { + tenancyId_id: { + tenancyId: tenancy.id, + id: executionId, + }, + }, + create: { + tenancyId: tenancy.id, + workflowId, + }, + }, + }, + }, + }); + return dbTrigger; +} + +async function triggerWorkflow(tenancy: Tenancy, compiledWorkflow: CompiledWorkflow, triggerId: string): Promise> { + if (compiledWorkflow.compiledCode === null) { + return Result.error(`Workflow ${compiledWorkflow.id} failed to compile: ${compiledWorkflow.compileError}`); + } + + const prisma = await getPrismaClientForTenancy(tenancy); + const trigger = await prisma.workflowTrigger.update({ + where: { + tenancyId_id: { + tenancyId: tenancy.id, + id: triggerId, + }, + }, + data: { + compiledWorkflowId: compiledWorkflow.id, + scheduledAt: null, + output: Prisma.DbNull, + error: Prisma.DbNull, + }, + }); + + const res = await triggerWorkflowRaw(tenancy, compiledWorkflow.compiledCode, trigger.triggerData as WorkflowTrigger); + if (res.status === "error") { + console.log(`Compiled workflow failed to process trigger: ${res.error}`, { trigger, compiledWorkflowId: compiledWorkflow.id, res }); + } else { + if (res.data && typeof res.data === "object" && "scheduledCallback" in res.data && res.data.scheduledCallback && typeof res.data.scheduledCallback === "object") { + const scheduledCallback: any = res.data.scheduledCallback; + const callbackId = `${scheduledCallback.callbackId}`; + const scheduleAt = new Date(scheduledCallback.scheduleAtMillis); + const callbackData = scheduledCallback.data; + await createScheduledTrigger( + tenancy, + compiledWorkflow.id, + { + type: "callback", + callbackId, + data: callbackData, + scheduledAtMillis: scheduleAt.getTime(), + callerTriggerId: triggerId, + executionId: trigger.executionId, + }, + scheduleAt + ); + } + } + await prisma.workflowTrigger.update({ + where: { + tenancyId_id: { + tenancyId: tenancy.id, + id: triggerId, + }, + }, + data: { + ...res.status === "ok" ? 
{ + output: res.data as any, + } : { + error: res.error, + }, + }, + }); + return Result.ok(undefined); +} + +export async function triggerScheduledCallbacks(tenancy: Tenancy) { + +} + +export async function triggerWorkflows(tenancy: Tenancy, trigger: WorkflowTrigger & { type: WorkflowRegisteredTriggerType }) { + runAsynchronouslyAndWaitUntil(async () => { + const compiledWorkflows = await compileAndGetEnabledWorkflows(tenancy); + const promises = [...compiledWorkflows] + .filter(([_, compiledWorkflow]) => compiledWorkflow.registeredTriggers.includes(trigger.type)) + .map(async ([workflowId, compiledWorkflow]) => { + const dbTrigger = await createScheduledTrigger(tenancy, workflowId, trigger, new Date()); + await triggerWorkflow(tenancy, compiledWorkflow, dbTrigger.id); + }); + await Promise.all(promises); + }); +} diff --git a/apps/backend/src/middleware.tsx b/apps/backend/src/middleware.tsx index b7b5aa792a..1dd940a2f1 100644 --- a/apps/backend/src/middleware.tsx +++ b/apps/backend/src/middleware.tsx @@ -26,6 +26,7 @@ const corsAllowedRequestHeaders = [ 'x-stack-secret-server-key', 'x-stack-super-secret-admin-key', 'x-stack-admin-access-token', + 'x-stack-workflow-token', // User auth 'x-stack-refresh-token', diff --git a/apps/backend/src/prisma-client.tsx b/apps/backend/src/prisma-client.tsx index e651de3ee6..7b138ae380 100644 --- a/apps/backend/src/prisma-client.tsx +++ b/apps/backend/src/prisma-client.tsx @@ -121,9 +121,9 @@ class TransactionErrorThatShouldNotBeRetried extends Error { } } -export async function retryTransaction(client: PrismaClient, fn: (tx: PrismaClientTransaction) => Promise): Promise { - // disable serializable transactions for now, later we may re-add them - const enableSerializable = false as boolean; +export async function retryTransaction(client: PrismaClient, fn: (tx: PrismaClientTransaction) => Promise, options: { level?: "default" | "serializable" } = {}): Promise { + // serializable transactions are currently off by default, later we may turn them on + const enableSerializable = options.level === "serializable"; return await traceSpan('Prisma transaction', async (span) => { const res = await Result.retry(async (attemptIndex) => { @@ -154,7 +154,7 @@ export async function retryTransaction(client: PrismaClient, fn: (tx: PrismaC } return res; }, { - isolationLevel: enableSerializable && attemptIndex < 4 ? Prisma.TransactionIsolationLevel.Serializable : undefined, + isolationLevel: enableSerializable ? 
Prisma.TransactionIsolationLevel.Serializable : undefined, })); } catch (e) { // we don't want to retry too aggressively here, because the error may have been thrown after the transaction was already committed diff --git a/apps/backend/src/route-handlers/smart-request.tsx b/apps/backend/src/route-handlers/smart-request.tsx index aca6985c48..e6035124eb 100644 --- a/apps/backend/src/route-handlers/smart-request.tsx +++ b/apps/backend/src/route-handlers/smart-request.tsx @@ -6,6 +6,7 @@ import { checkApiKeySet, checkApiKeySetQuery } from "@/lib/internal-api-keys"; import { getProjectQuery, listManagedProjectIds } from "@/lib/projects"; import { DEFAULT_BRANCH_ID, Tenancy, getSoleTenancyFromProjectBranch } from "@/lib/tenancies"; import { decodeAccessToken } from "@/lib/tokens"; +import { hashWorkflowTriggerToken } from "@/lib/workflows"; import { globalPrismaClient, rawQueryAll } from "@/prisma-client"; import { KnownErrors } from "@stackframe/stack-shared"; import { ProjectsCrud } from "@stackframe/stack-shared/dist/interface/crud/projects"; @@ -167,6 +168,7 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque const secretServerKey = req.headers.get("x-stack-secret-server-key"); const superSecretAdminKey = req.headers.get("x-stack-super-secret-admin-key"); const adminAccessToken = req.headers.get("x-stack-admin-access-token"); + const workflowToken = req.headers.get("x-stack-workflow-token"); const accessToken = req.headers.get("x-stack-access-token"); const developmentKeyOverride = req.headers.get("x-stack-development-override-key"); // in development, the internal project's API key can optionally be used to access any project const allowAnonymousUser = req.headers.get("x-stack-allow-anonymous-user") === "true"; @@ -273,8 +275,27 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque const result = await checkApiKeySet("internal", { superSecretAdminKey: developmentKeyOverride }); if (!result) throw new StatusError(401, "Invalid development key override"); } else if (adminAccessToken) { - // TODO put the assertion below into the bundled queries above (not so important because this path is quite rare) + // TODO put this into the bundled queries above (not so important because this path is quite rare) await extractUserFromAdminAccessToken({ token: adminAccessToken, projectId }); // assert that the admin token is valid + } else if (workflowToken) { + // TODO put this into the bundled queries above (not so important because this path is quite rare) + if (requestType === "admin") { + throw new KnownErrors.AdminAuthenticationRequired(); + } + if (!["client", "server"].includes(requestType)) { + throw new StackAssertionError(`Unexpected request type in workflow token auth: ${requestType}. This should never happen because we should've filtered this earlier`); + } + const workflowTokenHash = await hashWorkflowTriggerToken(workflowToken); + const workflowTriggerToken = tenancy ? 
await globalPrismaClient.workflowTriggerToken.findUnique({ + where: { + tenancyId_tokenHash: { + tenancyId: tenancy.id, + tokenHash: workflowTokenHash, + }, + }, + }) : undefined; + if (!workflowTriggerToken) throw new KnownErrors.WorkflowTokenDoesNotExist(); + if (workflowTriggerToken.expiresAt < new Date()) throw new KnownErrors.WorkflowTokenExpired(); } else { switch (requestType) { case "client": { @@ -288,7 +309,7 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque break; } case "admin": { - if (!superSecretAdminKey) throw new KnownErrors.AdminAuthenticationRequired; + if (!superSecretAdminKey) throw new KnownErrors.AdminAuthenticationRequired(); if (!queriesResults.isAdminKeyValid) throw new KnownErrors.InvalidSuperSecretAdminKey(projectId); break; } @@ -297,6 +318,7 @@ const parseAuth = withTraceSpan('smart request parseAuth', async (req: NextReque } } } + if (!tenancy) { throw new KnownErrors.BranchDoesNotExist(branchId); } diff --git a/apps/backend/src/utils/vercel.tsx b/apps/backend/src/utils/vercel.tsx index 0dc1de32af..1ca40dfa9d 100644 --- a/apps/backend/src/utils/vercel.tsx +++ b/apps/backend/src/utils/vercel.tsx @@ -2,7 +2,15 @@ import { runAsynchronously } from "@stackframe/stack-shared/dist/utils/promises" // eslint-disable-next-line no-restricted-imports import { waitUntil as waitUntilVercel } from "@vercel/functions"; -export function runAsynchronouslyAndWaitUntil(promise: Promise) { +export function runAsynchronouslyAndWaitUntil(promiseOrFunction: Promise | (() => Promise)) { + const promise = typeof promiseOrFunction === "function" ? promiseOrFunction() : promiseOrFunction; runAsynchronously(promise); waitUntilVercel(promise); } + +export async function allPromisesAndWaitUntilEach(promises: Promise[]): Promise { + for (const promise of promises) { + waitUntilVercel(promise); + } + return await Promise.all(promises); +} diff --git a/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/auth-methods/providers.tsx b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/auth-methods/providers.tsx index 958ad7abe7..600a28a41a 100644 --- a/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/auth-methods/providers.tsx +++ b/apps/dashboard/src/app/(main)/(protected)/projects/[projectId]/auth-methods/providers.tsx @@ -125,6 +125,10 @@ export function ProviderSettingDialog(props: Props & { open: boolean, onClose: ( } + + Existing user accounts will be transferred over automatically when you change the OAuth keys. + + {!form.watch("shared") && ( <> ; } else { innerContent = ( - { + if (workflow && workflow.tsSource) { + setWorkflowContent(workflow.tsSource); + } + }, [workflow]); + + const handleSave = async () => { + setIsLoading(true); + try { + await project.updateConfig({ + [`workflows.availableWorkflows.${workflowId}.tsSource`]: workflowContent + }); + toast({ title: "Workflow saved successfully" }); + } catch (error) { + toast({ title: "Failed to save workflow", variant: "destructive" }); + } finally { + setIsLoading(false); + } + }; + + const handleBack = () => { + router.push(`/projects/${projectId}/workflows`); + }; + + if (workflow === undefined) { + return ( + +
+        {/* ...markup elided... */}
+        The workflow {JSON.stringify(workflowId)} was not found.
+    );
+  }
+
+  return (
+    {/* ...markup elided... */}
+        Workflow Definition
+        {workflow.enabled ? "This workflow is enabled" : "This workflow is disabled"}
+        {/* ...markup elided... */}
+  );
+}
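The WorkflowTrigger model documents which combinations of scheduledAt, compiledWorkflowId, output, and error are valid. A small TypeScript sketch (not part of the diff) that maps those combinations to a state, mirroring the schema comment:

// Mirrors the invariants documented on the WorkflowTrigger model; field names match the Prisma schema.
type WorkflowTriggerState = "scheduled" | "executing" | "succeeded" | "failed";

function getWorkflowTriggerState(t: {
  scheduledAt: Date | null,
  compiledWorkflowId: string | null,
  output: unknown,
  error: unknown,
}): WorkflowTriggerState {
  if (t.scheduledAt && !t.compiledWorkflowId && !t.output && !t.error) return "scheduled";
  if (!t.scheduledAt && t.compiledWorkflowId && !t.output && !t.error) return "executing";
  if (!t.scheduledAt && t.compiledWorkflowId && t.output && !t.error) return "succeeded";
  if (!t.scheduledAt && t.compiledWorkflowId && !t.output && t.error) return "failed";
  throw new Error("Invalid WorkflowTrigger state; see the invariants in schema.prisma");
}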
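For the unwrap chain in Freestyle.executeScript (the wrap1 through wrap4 steps), a worked example may help. The nested shape below is an assumption based on the comment about JSON.parse(e.error.error).error, not a documented Freestyle API:

// Assumed error thrown by freestyle.executeScript (shape taken from the comment in the diff):
const e: unknown = { error: { error: JSON.stringify({ error: "Cannot find module 'react'" }) } };

// wrap1: e.error           -> { error: '{"error":"Cannot find module \'react\'"}' }
// wrap2: wrap1.error       -> '{"error":"Cannot find module \'react\'"}'
// wrap3: parseJson(wrap2)  -> { error: "Cannot find module 'react'" }
// wrap4: wrap3.error       -> "Cannot find module 'react'"
// Result.error(`${wrap4}`) -> Result.error("Cannot find module 'react'")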
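As a point of reference for the entry.js wrapper bundled in compileWorkflowSource, here is a sketch of what a tenant-authored tsSource stored in workflows.availableWorkflows could look like. It only uses the globals injected by that wrapper (onSignUp, registerCallback, scheduleCallback, stackApp); the callback id, the 24-hour delay, and the shape of the callback data are illustrative assumptions, not part of this diff.

// Hypothetical workflow source (tsSource): a sketch only.
// onSignUp, registerCallback, scheduleCallback, and stackApp are globals injected by entry.js.
onSignUp(async (user) => {
  // The handler receives the ServerUser resolved via stackApp.getUser(data.userId, { or: "throw" }).
  console.log(`New sign-up: ${user.id}`);

  // scheduleCallback's return value must be returned from the handler,
  // otherwise the wrapper throws ("you must return its return value").
  return scheduleCallback({
    callbackId: "welcome-follow-up",
    data: { userId: user.id },
    scheduleAt: new Date(Date.now() + 24 * 60 * 60 * 1000), // assumed delay
  });
});

registerCallback("welcome-follow-up", async (data) => {
  // Fires when the scheduled "callback" trigger is executed; the exact shape of `data` is assumed here.
  const user = await stackApp.getUser(data.userId, { or: "throw" });
  console.log(`Sending follow-up to ${user.id}`);
});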