Merged

Commits (43)
0c08896 clickhouse setup (BilalG1, Nov 26, 2025)
41287db fix clickhouse env vars (BilalG1, Nov 26, 2025)
1320221 fix docker server test (BilalG1, Nov 26, 2025)
dfb0245 fix default access management for docker test (BilalG1, Nov 26, 2025)
0077d16 Merge branch 'dev' into clickhouse-setup (BilalG1, Nov 26, 2025)
62c6646 merge dev (BilalG1, Dec 1, 2025)
90cc6a9 remove unused table (BilalG1, Dec 1, 2025)
c999d16 Merge branch 'dev' into clickhouse-setup (BilalG1, Dec 1, 2025)
20b597f Merge branch 'dev' into clickhouse-setup (BilalG1, Dec 8, 2025)
73407b3 merge dev (BilalG1, Dec 17, 2025)
688c9ce small fixes (BilalG1, Dec 17, 2025)
f6fb8cf Merge remote-tracking branch 'origin/dev' into clickhouse-setup (BilalG1, Dec 17, 2025)
44a3496 fix lint (BilalG1, Dec 17, 2025)
63da706 Clickhouse events (#1038) (BilalG1, Dec 18, 2025)
7a4bab9 Merge branch 'dev' into clickhouse-setup (BilalG1, Dec 18, 2025)
39f94c9 fix frontend build (BilalG1, Dec 18, 2025)
beae857 fix lint (BilalG1, Dec 18, 2025)
8c030db add query analytics page (BilalG1, Dec 18, 2025)
e622514 stricter user in clickhouse migration (BilalG1, Dec 18, 2025)
fae5166 modify settings test, clickhouse fixes (BilalG1, Dec 18, 2025)
49c8d0f various changes (N2D4, Dec 18, 2025)
1134883 todos (N2D4, Dec 19, 2025)
30a8212 clickhouse error code parsing (BilalG1, Dec 19, 2025)
8e72a7b merge dev (BilalG1, Dec 19, 2025)
ba8110f fix tests (BilalG1, Dec 19, 2025)
136b25f small fixes, use view for ch events (BilalG1, Dec 20, 2025)
3760836 fix (BilalG1, Dec 20, 2025)
3ff2f4f merge (BilalG1, Jan 21, 2026)
42bfed5 fix build (BilalG1, Jan 21, 2026)
b9d72b0 small fix (BilalG1, Jan 21, 2026)
9f501c8 Merge remote-tracking branch 'origin/dev' into clickhouse-setup (BilalG1, Jan 21, 2026)
39099af merge dev (BilalG1, Jan 26, 2026)
176775d merge dev (BilalG1, Jan 27, 2026)
aa2a5dc Merge branch 'dev' into clickhouse-setup (BilalG1, Jan 27, 2026)
ab30ea3 Merge branch 'dev' into clickhouse-setup (BilalG1, Jan 27, 2026)
14c70ef remove analyzer (BilalG1, Jan 27, 2026)
4f1ded7 Merge branch 'clickhouse-setup' of https://github.com/stack-auth/stac… (BilalG1, Jan 27, 2026)
55fa597 pnpm lock file (BilalG1, Jan 27, 2026)
bf0675f change wal info port (BilalG1, Jan 27, 2026)
54bbcf9 fix wall info port (BilalG1, Jan 27, 2026)
e7b4bfc Merge branch 'dev' into clickhouse-setup (BilalG1, Jan 28, 2026)
1d8e9ec Merge branch 'dev' into clickhouse-setup (BilalG1, Jan 28, 2026)
360176f Merge branch 'dev' into clickhouse-setup (BilalG1, Jan 28, 2026)
7 changes: 7 additions & 0 deletions .github/workflows/docker-server-build-run.yaml
@@ -22,6 +22,13 @@ jobs:
docker run -d --name db -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=password -e POSTGRES_DB=stackframe -p 8128:5432 postgres:latest
sleep 5
docker logs db

- name: Setup clickhouse
run: |
docker run -d --name clickhouse -e CLICKHOUSE_DB=analytics -e CLICKHOUSE_USER=stackframe -e CLICKHOUSE_PASSWORD=password -e CLICKHOUSE_DEFAULT_ACCESS_MANAGEMENT=1 -p 8133:8123 clickhouse/clickhouse-server:25.10
sleep 5
docker logs clickhouse


- name: Build Docker image
run: docker build -f docker/server/Dockerfile -t server .
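A quick way to verify the container started by the step above is to poll ClickHouse's HTTP /ping endpoint on the mapped host port. A minimal sketch (not part of this PR), assuming the 8133 -> 8123 port mapping from the workflow step; it could stand in for the fixed `sleep 5`, though the workflow's approach also works:

```ts
// Hedged example: wait until the ClickHouse container from the CI step above
// is ready. Assumes host port 8133 is mapped to the container's HTTP port 8123.
async function waitForClickhouse(baseUrl: string, attempts = 10): Promise<void> {
  for (let i = 0; i < attempts; i++) {
    try {
      // ClickHouse's HTTP interface answers GET /ping with "Ok." once it is up.
      const res = await fetch(`${baseUrl}/ping`);
      if (res.ok) return;
    } catch {
      // Not accepting connections yet; fall through to the retry delay.
    }
    await new Promise((resolve) => setTimeout(resolve, 1_000));
  }
  throw new Error(`ClickHouse at ${baseUrl} did not become ready in time`);
}

await waitForClickhouse("http://localhost:8133");
```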
7 changes: 7 additions & 0 deletions apps/backend/.env
@@ -81,6 +81,13 @@ STACK_QSTASH_TOKEN=
STACK_QSTASH_CURRENT_SIGNING_KEY=
STACK_QSTASH_NEXT_SIGNING_KEY=

# Clickhouse
STACK_CLICKHOUSE_URL=# URL of the Clickhouse instance
STACK_CLICKHOUSE_ADMIN_USER=# username of the admin account
STACK_CLICKHOUSE_ADMIN_PASSWORD=# password of the admin account
STACK_CLICKHOUSE_EXTERNAL_PASSWORD=# a randomly generated secure string. The user account will be created automatically


# Misc
STACK_ACCESS_TOKEN_EXPIRATION_TIME=# enter the expiration time for the access token here. Optional, don't specify it for default value
STACK_SETUP_ADMIN_GITHUB_ID=# enter the account ID of the admin user here, and after running the seed script they will be able to access the internal project in the Stack dashboard. Optional, don't specify it for default value
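The getClickhouseAdminClient / getClickhouseExternalClient helpers used elsewhere in this PR are imported from "@/lib/clickhouse", which is not shown in this diff. As a rough, hedged sketch of how such clients could be built from the variables above with @clickhouse/client (names and options are illustrative assumptions, not the PR's actual implementation):

```ts
// Hedged sketch only: the real "@/lib/clickhouse" module is not part of the
// shown diff, so the details here are illustrative assumptions.
import { createClient } from "@clickhouse/client";

export function getClickhouseAdminClient() {
  return createClient({
    url: process.env.STACK_CLICKHOUSE_URL,
    username: process.env.STACK_CLICKHOUSE_ADMIN_USER,
    password: process.env.STACK_CLICKHOUSE_ADMIN_PASSWORD,
  });
}

export function getClickhouseExternalClient() {
  // limited_user is created by the ClickHouse migration script in this PR,
  // identified by STACK_CLICKHOUSE_EXTERNAL_PASSWORD.
  return createClient({
    url: process.env.STACK_CLICKHOUSE_URL,
    username: "limited_user",
    password: process.env.STACK_CLICKHOUSE_EXTERNAL_PASSWORD,
  });
}
```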
6 changes: 6 additions & 0 deletions apps/backend/.env.development
@@ -73,3 +73,9 @@ STACK_QSTASH_URL=http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}25
STACK_QSTASH_TOKEN=eyJVc2VySUQiOiJkZWZhdWx0VXNlciIsIlBhc3N3b3JkIjoiZGVmYXVsdFBhc3N3b3JkIn0=
STACK_QSTASH_CURRENT_SIGNING_KEY=sig_7kYjw48mhY7kAjqNGcy6cr29RJ6r
STACK_QSTASH_NEXT_SIGNING_KEY=sig_5ZB6DVzB1wjE8S6rZ7eenA8Pdnhs

# Clickhouse
STACK_CLICKHOUSE_URL=http://localhost:${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}36
STACK_CLICKHOUSE_ADMIN_USER=stackframe
STACK_CLICKHOUSE_ADMIN_PASSWORD=PASSWORD-PLACEHOLDER--9gKyMxJeMx
STACK_CLICKHOUSE_EXTERNAL_PASSWORD=PASSWORD-PLACEHOLDER--EZeHscBMzE
2 changes: 2 additions & 0 deletions apps/backend/package.json
@@ -25,6 +25,7 @@
"codegen": "pnpm run with-env pnpm run generate-migration-imports && pnpm run with-env bash -c 'if [ \"$STACK_ACCELERATE_ENABLED\" = \"true\" ]; then pnpm run prisma generate --no-engine; else pnpm run codegen-prisma; fi' && pnpm run codegen-docs && pnpm run codegen-route-info",
"codegen:watch": "concurrently -n \"prisma,docs,route-info,migration-imports\" -k \"pnpm run codegen-prisma:watch\" \"pnpm run codegen-docs:watch\" \"pnpm run codegen-route-info:watch\" \"pnpm run generate-migration-imports:watch\"",
"psql-inner": "psql $(echo $STACK_DATABASE_CONNECTION_STRING | sed 's/\\?.*$//')",
"clickhouse": "pnpm run with-env clickhouse-client --host localhost --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}37 --user stackframe --password PASSWORD-PLACEHOLDER--9gKyMxJeMx",
"psql": "pnpm run with-env:dev pnpm run psql-inner",
"prisma-studio": "pnpm run with-env:dev prisma studio --port ${NEXT_PUBLIC_STACK_PORT_PREFIX:-81}06 --browser none",
"prisma:dev": "pnpm run with-env:dev prisma",
@@ -50,6 +51,7 @@
"dependencies": {
"@ai-sdk/openai": "^1.3.23",
"@aws-sdk/client-s3": "^3.855.0",
"@clickhouse/client": "^1.14.0",
"@node-oauth/oauth2-server": "^5.1.0",
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/api-logs": "^0.53.0",
61 changes: 61 additions & 0 deletions apps/backend/scripts/clickhouse-migrations.ts
@@ -0,0 +1,61 @@
import { getClickhouseAdminClient } from "@/lib/clickhouse";
import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env";

export async function runClickhouseMigrations() {
console.log("[Clickhouse] Running Clickhouse migrations...");
const client = getClickhouseAdminClient();
const clickhouseExternalPassword = getEnvVariable("STACK_CLICKHOUSE_EXTERNAL_PASSWORD");
await client.exec({
query: "CREATE USER IF NOT EXISTS limited_user IDENTIFIED WITH sha256_password BY {clickhouseExternalPassword:String}",
query_params: { clickhouseExternalPassword },
});
// todo: create migration files
await client.exec({ query: EXTERNAL_ANALYTICS_DB_SQL });
await client.exec({ query: EVENTS_TABLE_BASE_SQL });
await client.exec({ query: EVENTS_VIEW_SQL });
const queries = [
"REVOKE ALL PRIVILEGES ON *.* FROM limited_user;",
"REVOKE ALL FROM limited_user;",
"GRANT SELECT ON default.events TO limited_user;",
];
await client.exec({
query: "CREATE ROW POLICY IF NOT EXISTS events_project_isolation ON default.events FOR SELECT USING project_id = getSetting('SQL_project_id') AND branch_id = getSetting('SQL_branch_id') TO limited_user",
});
for (const query of queries) {
await client.exec({ query });
}
console.log("[Clickhouse] Clickhouse migrations complete");
await client.close();
}

const EVENTS_TABLE_BASE_SQL = `
CREATE TABLE IF NOT EXISTS analytics_internal.events (
event_type LowCardinality(String),
event_at DateTime64(3, 'UTC'),
data JSON,
project_id String,
branch_id String,
user_id String,
team_id String,
refresh_token_id String,
is_anonymous Boolean,
session_id String,
ip_address String,
created_at DateTime64(3, 'UTC') DEFAULT now64(3)
)
ENGINE MergeTree
PARTITION BY toYYYYMM(event_at)
ORDER BY (project_id, branch_id, event_at);
`;

const EVENTS_VIEW_SQL = `
CREATE OR REPLACE VIEW default.events
SQL SECURITY DEFINER
AS
SELECT *
FROM analytics_internal.events;
`;

const EXTERNAL_ANALYTICS_DB_SQL = `
CREATE DATABASE IF NOT EXISTS analytics_internal;
`;
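The script above grants limited_user SELECT on the default.events view only, and the row policy filters every SELECT by the SQL_project_id and SQL_branch_id custom settings. A hedged sketch of how a query as limited_user exercises that isolation (mirroring the analytics query route later in this PR; client construction and the example IDs are illustrative):

```ts
// Illustrative only: queries issued as limited_user see just the rows whose
// project_id/branch_id match the settings passed with the query, because the
// row policy above compares them against getSetting('SQL_project_id') and
// getSetting('SQL_branch_id').
import { createClient } from "@clickhouse/client";

const client = createClient({
  url: process.env.STACK_CLICKHOUSE_URL,
  username: "limited_user",
  password: process.env.STACK_CLICKHOUSE_EXTERNAL_PASSWORD,
});

const resultSet = await client.query({
  query: "SELECT event_type, count() AS events FROM default.events GROUP BY event_type",
  clickhouse_settings: {
    // Hypothetical IDs; in the backend these come from the authenticated tenancy.
    SQL_project_id: "example-project-id",
    SQL_branch_id: "main",
  },
  format: "JSONEachRow",
});

console.log(await resultSet.json());
```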
11 changes: 10 additions & 1 deletion apps/backend/scripts/db-migrations.ts
@@ -2,18 +2,25 @@ import { applyMigrations } from "@/auto-migrations";
import { MIGRATION_FILES_DIR, getMigrationFiles } from "@/auto-migrations/utils";
import { Prisma } from "@/generated/prisma/client";
import { globalPrismaClient, globalPrismaSchema, sqlQuoteIdent } from "@/prisma-client";
import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env";
import { spawnSync } from "child_process";
import fs from "fs";
import path from "path";
import * as readline from "readline";
import { seed } from "../prisma/seed";
import { getEnvVariable } from "@stackframe/stack-shared/dist/utils/env";
import { runClickhouseMigrations } from "./clickhouse-migrations";
import { getClickhouseAdminClient } from "@/lib/clickhouse";

const getClickhouseClient = () => getClickhouseAdminClient();

const dropSchema = async () => {
await globalPrismaClient.$executeRaw(Prisma.sql`DROP SCHEMA ${sqlQuoteIdent(globalPrismaSchema)} CASCADE`);
await globalPrismaClient.$executeRaw(Prisma.sql`CREATE SCHEMA ${sqlQuoteIdent(globalPrismaSchema)}`);
await globalPrismaClient.$executeRaw(Prisma.sql`GRANT ALL ON SCHEMA ${sqlQuoteIdent(globalPrismaSchema)} TO postgres`);
await globalPrismaClient.$executeRaw(Prisma.sql`GRANT ALL ON SCHEMA ${sqlQuoteIdent(globalPrismaSchema)} TO public`);
const clickhouseClient = getClickhouseClient();
await clickhouseClient.command({ query: "DROP DATABASE IF EXISTS analytics_internal" });
await clickhouseClient.command({ query: "CREATE DATABASE IF NOT EXISTS analytics_internal" });
};


@@ -163,6 +170,8 @@ const migrate = async (selectedMigrationFiles?: { migrationName: string, sql: st

console.log('='.repeat(60) + '\n');

await runClickhouseMigrations();

return result;
};

120 changes: 120 additions & 0 deletions apps/backend/src/app/api/latest/internal/analytics/query/route.ts
@@ -0,0 +1,120 @@
import { getClickhouseExternalClient, getQueryTimingStats, isClickhouseConfigured } from "@/lib/clickhouse";
import { createSmartRouteHandler } from "@/route-handlers/smart-route-handler";
import { KnownErrors } from "@stackframe/stack-shared";
import { adaptSchema, adminAuthTypeSchema, jsonSchema, yupBoolean, yupMixed, yupNumber, yupObject, yupRecord, yupString } from "@stackframe/stack-shared/dist/schema-fields";
import { StackAssertionError } from "@stackframe/stack-shared/dist/utils/errors";
import { Result } from "@stackframe/stack-shared/dist/utils/results";
import { randomUUID } from "crypto";

export const POST = createSmartRouteHandler({
metadata: { hidden: true },
request: yupObject({
auth: yupObject({
type: adminAuthTypeSchema,
tenancy: adaptSchema,
}).defined(),
body: yupObject({
include_all_branches: yupBoolean().default(false),
query: yupString().defined().nonEmpty(),
params: yupRecord(yupString().defined(), yupMixed().defined()).default({}),
timeout_ms: yupNumber().integer().min(1_000).default(10_000),
}).defined(),
}),
response: yupObject({
statusCode: yupNumber().oneOf([200]).defined(),
bodyType: yupString().oneOf(["json"]).defined(),
body: yupObject({
result: jsonSchema.defined(),
stats: yupObject({
cpu_time: yupNumber().defined(),
wall_clock_time: yupNumber().defined(),
}).defined(),
}).defined(),
}),
async handler({ body, auth }) {
if (body.include_all_branches) {
throw new StackAssertionError("include_all_branches is not supported yet");
}
if (!isClickhouseConfigured()) {
throw new StackAssertionError("ClickHouse is not configured");
}
const client = getClickhouseExternalClient();
const queryId = randomUUID();
const resultSet = await Result.fromPromise(client.query({
query: body.query,
query_id: queryId,
query_params: body.params,
clickhouse_settings: {
SQL_project_id: auth.tenancy.project.id,
SQL_branch_id: auth.tenancy.branchId,
max_execution_time: body.timeout_ms / 1000,
readonly: "1",
allow_ddl: 0,
max_result_rows: MAX_RESULT_ROWS.toString(),
max_result_bytes: MAX_RESULT_BYTES.toString(),
result_overflow_mode: "throw",
},
format: "JSONEachRow",
}));

if (resultSet.status === "error") {
const message = getSafeClickhouseErrorMessage(resultSet.error);
if (message === null) {
throw new StackAssertionError("Unknown Clickhouse error", { cause: resultSet.error });
}
throw new KnownErrors.AnalyticsQueryError(message);
}

const rows = await resultSet.data.json<Record<string, unknown>[]>();
const stats = await getQueryTimingStats(client, queryId);

return {
statusCode: 200,
bodyType: "json",
body: {
result: rows,
stats: {
cpu_time: stats.cpu_time_ms,
wall_clock_time: stats.wall_clock_time_ms,
},
},
};
},
});

const SAFE_CLICKHOUSE_ERROR_CODES = [
62, // SYNTAX_ERROR
159, // TIMEOUT_EXCEEDED
164, // READONLY
158, // TOO_MANY_ROWS
396, // TOO_MANY_ROWS_OR_BYTES
636, // CANNOT_EXTRACT_TABLE_STRUCTURE
];

const UNSAFE_CLICKHOUSE_ERROR_CODES = [
36, // BAD_ARGUMENTS
60, // UNKNOWN_TABLE
497, // ACCESS_DENIED
];

const DEFAULT_CLICKHOUSE_ERROR_MESSAGE = "Error during execution of this query.";
const MAX_RESULT_ROWS = 10_000;
const MAX_RESULT_BYTES = 10 * 1024 * 1024;

function getSafeClickhouseErrorMessage(error: unknown): string | null {
if (typeof error !== "object" || error === null || !("code" in error) || typeof error.code !== "string") {
return null;
}
const errorCode = Number(error.code);
if (isNaN(errorCode)) {
return null;
}
const message = "message" in error && typeof error.message === "string" ? error.message : null;
if (SAFE_CLICKHOUSE_ERROR_CODES.includes(errorCode)) {
return message;
}
if (UNSAFE_CLICKHOUSE_ERROR_CODES.includes(errorCode)) {
return DEFAULT_CLICKHOUSE_ERROR_MESSAGE;
}
return null;
}
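For reference, a hedged sketch of what a dashboard-side call to this handler could look like. The route path follows the file location above; the base URL and admin auth headers are not part of this diff and are assumptions here:

```ts
// Hedged example of calling the analytics query endpoint defined above.
// Request/response shapes follow the yup schemas; auth headers are omitted.
type AnalyticsQueryResponse = {
  result: Record<string, unknown>[],
  stats: { cpu_time: number, wall_clock_time: number },
};

async function runAnalyticsQuery(
  query: string,
  params: Record<string, unknown> = {},
): Promise<AnalyticsQueryResponse> {
  const res = await fetch("/api/latest/internal/analytics/query", {
    method: "POST",
    headers: { "Content-Type": "application/json" /* plus admin auth headers */ },
    body: JSON.stringify({ query, params, timeout_ms: 10_000 }),
  });
  if (!res.ok) throw new Error(`Analytics query failed with status ${res.status}`);
  return await res.json() as AnalyticsQueryResponse;
}

// Parameters are forwarded to ClickHouse as query_params, so they can be bound
// with the usual {name:Type} placeholders. The event type here is hypothetical.
const { result, stats } = await runAnalyticsQuery(
  "SELECT count() AS total FROM default.events WHERE event_type = {type:String}",
  { type: "example-event-type" },
);
console.log(result, stats);
```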